mirror of
https://github.com/astral-sh/uv.git
synced 2025-07-07 13:25:00 +00:00
chore: Move all integration tests to a single binary (#8093)
As per https://matklad.github.io/2021/02/27/delete-cargo-integration-tests.html Before that, there were 91 separate integration tests binary. (As discussed on Discord — I've done the `uv` crate, there's still a few more commits coming before this is mergeable, and I want to see how it performs in CI and locally).
This commit is contained in:
parent
fce7a838e9
commit
715f28fd39
231 changed files with 15585 additions and 15507 deletions
|
@ -3,6 +3,9 @@ name = "uv-auth"
|
|||
version = "0.0.1"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
|
@ -215,77 +215,4 @@ impl TrieState {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_trie() {
|
||||
let credentials1 = Arc::new(Credentials::new(
|
||||
Some("username1".to_string()),
|
||||
Some("password1".to_string()),
|
||||
));
|
||||
let credentials2 = Arc::new(Credentials::new(
|
||||
Some("username2".to_string()),
|
||||
Some("password2".to_string()),
|
||||
));
|
||||
let credentials3 = Arc::new(Credentials::new(
|
||||
Some("username3".to_string()),
|
||||
Some("password3".to_string()),
|
||||
));
|
||||
let credentials4 = Arc::new(Credentials::new(
|
||||
Some("username4".to_string()),
|
||||
Some("password4".to_string()),
|
||||
));
|
||||
|
||||
let mut trie = UrlTrie::new();
|
||||
trie.insert(
|
||||
&Url::parse("https://burntsushi.net").unwrap(),
|
||||
credentials1.clone(),
|
||||
);
|
||||
trie.insert(
|
||||
&Url::parse("https://astral.sh").unwrap(),
|
||||
credentials2.clone(),
|
||||
);
|
||||
trie.insert(
|
||||
&Url::parse("https://example.com/foo").unwrap(),
|
||||
credentials3.clone(),
|
||||
);
|
||||
trie.insert(
|
||||
&Url::parse("https://example.com/bar").unwrap(),
|
||||
credentials4.clone(),
|
||||
);
|
||||
|
||||
let url = Url::parse("https://burntsushi.net/regex-internals").unwrap();
|
||||
assert_eq!(trie.get(&url), Some(&credentials1));
|
||||
|
||||
let url = Url::parse("https://burntsushi.net/").unwrap();
|
||||
assert_eq!(trie.get(&url), Some(&credentials1));
|
||||
|
||||
let url = Url::parse("https://astral.sh/about").unwrap();
|
||||
assert_eq!(trie.get(&url), Some(&credentials2));
|
||||
|
||||
let url = Url::parse("https://example.com/foo").unwrap();
|
||||
assert_eq!(trie.get(&url), Some(&credentials3));
|
||||
|
||||
let url = Url::parse("https://example.com/foo/").unwrap();
|
||||
assert_eq!(trie.get(&url), Some(&credentials3));
|
||||
|
||||
let url = Url::parse("https://example.com/foo/bar").unwrap();
|
||||
assert_eq!(trie.get(&url), Some(&credentials3));
|
||||
|
||||
let url = Url::parse("https://example.com/bar").unwrap();
|
||||
assert_eq!(trie.get(&url), Some(&credentials4));
|
||||
|
||||
let url = Url::parse("https://example.com/bar/").unwrap();
|
||||
assert_eq!(trie.get(&url), Some(&credentials4));
|
||||
|
||||
let url = Url::parse("https://example.com/bar/foo").unwrap();
|
||||
assert_eq!(trie.get(&url), Some(&credentials4));
|
||||
|
||||
let url = Url::parse("https://example.com/about").unwrap();
|
||||
assert_eq!(trie.get(&url), None);
|
||||
|
||||
let url = Url::parse("https://example.com/foobar").unwrap();
|
||||
assert_eq!(trie.get(&url), None);
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
72
crates/uv-auth/src/cache/tests.rs
vendored
Normal file
72
crates/uv-auth/src/cache/tests.rs
vendored
Normal file
|
@ -0,0 +1,72 @@
|
|||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_trie() {
|
||||
let credentials1 = Arc::new(Credentials::new(
|
||||
Some("username1".to_string()),
|
||||
Some("password1".to_string()),
|
||||
));
|
||||
let credentials2 = Arc::new(Credentials::new(
|
||||
Some("username2".to_string()),
|
||||
Some("password2".to_string()),
|
||||
));
|
||||
let credentials3 = Arc::new(Credentials::new(
|
||||
Some("username3".to_string()),
|
||||
Some("password3".to_string()),
|
||||
));
|
||||
let credentials4 = Arc::new(Credentials::new(
|
||||
Some("username4".to_string()),
|
||||
Some("password4".to_string()),
|
||||
));
|
||||
|
||||
let mut trie = UrlTrie::new();
|
||||
trie.insert(
|
||||
&Url::parse("https://burntsushi.net").unwrap(),
|
||||
credentials1.clone(),
|
||||
);
|
||||
trie.insert(
|
||||
&Url::parse("https://astral.sh").unwrap(),
|
||||
credentials2.clone(),
|
||||
);
|
||||
trie.insert(
|
||||
&Url::parse("https://example.com/foo").unwrap(),
|
||||
credentials3.clone(),
|
||||
);
|
||||
trie.insert(
|
||||
&Url::parse("https://example.com/bar").unwrap(),
|
||||
credentials4.clone(),
|
||||
);
|
||||
|
||||
let url = Url::parse("https://burntsushi.net/regex-internals").unwrap();
|
||||
assert_eq!(trie.get(&url), Some(&credentials1));
|
||||
|
||||
let url = Url::parse("https://burntsushi.net/").unwrap();
|
||||
assert_eq!(trie.get(&url), Some(&credentials1));
|
||||
|
||||
let url = Url::parse("https://astral.sh/about").unwrap();
|
||||
assert_eq!(trie.get(&url), Some(&credentials2));
|
||||
|
||||
let url = Url::parse("https://example.com/foo").unwrap();
|
||||
assert_eq!(trie.get(&url), Some(&credentials3));
|
||||
|
||||
let url = Url::parse("https://example.com/foo/").unwrap();
|
||||
assert_eq!(trie.get(&url), Some(&credentials3));
|
||||
|
||||
let url = Url::parse("https://example.com/foo/bar").unwrap();
|
||||
assert_eq!(trie.get(&url), Some(&credentials3));
|
||||
|
||||
let url = Url::parse("https://example.com/bar").unwrap();
|
||||
assert_eq!(trie.get(&url), Some(&credentials4));
|
||||
|
||||
let url = Url::parse("https://example.com/bar/").unwrap();
|
||||
assert_eq!(trie.get(&url), Some(&credentials4));
|
||||
|
||||
let url = Url::parse("https://example.com/bar/foo").unwrap();
|
||||
assert_eq!(trie.get(&url), Some(&credentials4));
|
||||
|
||||
let url = Url::parse("https://example.com/about").unwrap();
|
||||
assert_eq!(trie.get(&url), None);
|
||||
|
||||
let url = Url::parse("https://example.com/foobar").unwrap();
|
||||
assert_eq!(trie.get(&url), None);
|
||||
}
|
|
@ -230,111 +230,4 @@ impl Credentials {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use insta::assert_debug_snapshot;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn from_url_no_credentials() {
|
||||
let url = &Url::parse("https://example.com/simple/first/").unwrap();
|
||||
assert_eq!(Credentials::from_url(url), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_url_username_and_password() {
|
||||
let url = &Url::parse("https://example.com/simple/first/").unwrap();
|
||||
let mut auth_url = url.clone();
|
||||
auth_url.set_username("user").unwrap();
|
||||
auth_url.set_password(Some("password")).unwrap();
|
||||
let credentials = Credentials::from_url(&auth_url).unwrap();
|
||||
assert_eq!(credentials.username(), Some("user"));
|
||||
assert_eq!(credentials.password(), Some("password"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_url_no_username() {
|
||||
let url = &Url::parse("https://example.com/simple/first/").unwrap();
|
||||
let mut auth_url = url.clone();
|
||||
auth_url.set_password(Some("password")).unwrap();
|
||||
let credentials = Credentials::from_url(&auth_url).unwrap();
|
||||
assert_eq!(credentials.username(), None);
|
||||
assert_eq!(credentials.password(), Some("password"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_url_no_password() {
|
||||
let url = &Url::parse("https://example.com/simple/first/").unwrap();
|
||||
let mut auth_url = url.clone();
|
||||
auth_url.set_username("user").unwrap();
|
||||
let credentials = Credentials::from_url(&auth_url).unwrap();
|
||||
assert_eq!(credentials.username(), Some("user"));
|
||||
assert_eq!(credentials.password(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn authenticated_request_from_url() {
|
||||
let url = Url::parse("https://example.com/simple/first/").unwrap();
|
||||
let mut auth_url = url.clone();
|
||||
auth_url.set_username("user").unwrap();
|
||||
auth_url.set_password(Some("password")).unwrap();
|
||||
let credentials = Credentials::from_url(&auth_url).unwrap();
|
||||
|
||||
let mut request = reqwest::Request::new(reqwest::Method::GET, url);
|
||||
request = credentials.authenticate(request);
|
||||
|
||||
let mut header = request
|
||||
.headers()
|
||||
.get(reqwest::header::AUTHORIZATION)
|
||||
.expect("Authorization header should be set")
|
||||
.clone();
|
||||
header.set_sensitive(false);
|
||||
|
||||
assert_debug_snapshot!(header, @r###""Basic dXNlcjpwYXNzd29yZA==""###);
|
||||
assert_eq!(Credentials::from_header_value(&header), Some(credentials));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn authenticated_request_from_url_with_percent_encoded_user() {
|
||||
let url = Url::parse("https://example.com/simple/first/").unwrap();
|
||||
let mut auth_url = url.clone();
|
||||
auth_url.set_username("user@domain").unwrap();
|
||||
auth_url.set_password(Some("password")).unwrap();
|
||||
let credentials = Credentials::from_url(&auth_url).unwrap();
|
||||
|
||||
let mut request = reqwest::Request::new(reqwest::Method::GET, url);
|
||||
request = credentials.authenticate(request);
|
||||
|
||||
let mut header = request
|
||||
.headers()
|
||||
.get(reqwest::header::AUTHORIZATION)
|
||||
.expect("Authorization header should be set")
|
||||
.clone();
|
||||
header.set_sensitive(false);
|
||||
|
||||
assert_debug_snapshot!(header, @r###""Basic dXNlckBkb21haW46cGFzc3dvcmQ=""###);
|
||||
assert_eq!(Credentials::from_header_value(&header), Some(credentials));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn authenticated_request_from_url_with_percent_encoded_password() {
|
||||
let url = Url::parse("https://example.com/simple/first/").unwrap();
|
||||
let mut auth_url = url.clone();
|
||||
auth_url.set_username("user").unwrap();
|
||||
auth_url.set_password(Some("password==")).unwrap();
|
||||
let credentials = Credentials::from_url(&auth_url).unwrap();
|
||||
|
||||
let mut request = reqwest::Request::new(reqwest::Method::GET, url);
|
||||
request = credentials.authenticate(request);
|
||||
|
||||
let mut header = request
|
||||
.headers()
|
||||
.get(reqwest::header::AUTHORIZATION)
|
||||
.expect("Authorization header should be set")
|
||||
.clone();
|
||||
header.set_sensitive(false);
|
||||
|
||||
assert_debug_snapshot!(header, @r###""Basic dXNlcjpwYXNzd29yZD09""###);
|
||||
assert_eq!(Credentials::from_header_value(&header), Some(credentials));
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
106
crates/uv-auth/src/credentials/tests.rs
Normal file
106
crates/uv-auth/src/credentials/tests.rs
Normal file
|
@ -0,0 +1,106 @@
|
|||
use insta::assert_debug_snapshot;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn from_url_no_credentials() {
|
||||
let url = &Url::parse("https://example.com/simple/first/").unwrap();
|
||||
assert_eq!(Credentials::from_url(url), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_url_username_and_password() {
|
||||
let url = &Url::parse("https://example.com/simple/first/").unwrap();
|
||||
let mut auth_url = url.clone();
|
||||
auth_url.set_username("user").unwrap();
|
||||
auth_url.set_password(Some("password")).unwrap();
|
||||
let credentials = Credentials::from_url(&auth_url).unwrap();
|
||||
assert_eq!(credentials.username(), Some("user"));
|
||||
assert_eq!(credentials.password(), Some("password"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_url_no_username() {
|
||||
let url = &Url::parse("https://example.com/simple/first/").unwrap();
|
||||
let mut auth_url = url.clone();
|
||||
auth_url.set_password(Some("password")).unwrap();
|
||||
let credentials = Credentials::from_url(&auth_url).unwrap();
|
||||
assert_eq!(credentials.username(), None);
|
||||
assert_eq!(credentials.password(), Some("password"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_url_no_password() {
|
||||
let url = &Url::parse("https://example.com/simple/first/").unwrap();
|
||||
let mut auth_url = url.clone();
|
||||
auth_url.set_username("user").unwrap();
|
||||
let credentials = Credentials::from_url(&auth_url).unwrap();
|
||||
assert_eq!(credentials.username(), Some("user"));
|
||||
assert_eq!(credentials.password(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn authenticated_request_from_url() {
|
||||
let url = Url::parse("https://example.com/simple/first/").unwrap();
|
||||
let mut auth_url = url.clone();
|
||||
auth_url.set_username("user").unwrap();
|
||||
auth_url.set_password(Some("password")).unwrap();
|
||||
let credentials = Credentials::from_url(&auth_url).unwrap();
|
||||
|
||||
let mut request = reqwest::Request::new(reqwest::Method::GET, url);
|
||||
request = credentials.authenticate(request);
|
||||
|
||||
let mut header = request
|
||||
.headers()
|
||||
.get(reqwest::header::AUTHORIZATION)
|
||||
.expect("Authorization header should be set")
|
||||
.clone();
|
||||
header.set_sensitive(false);
|
||||
|
||||
assert_debug_snapshot!(header, @r###""Basic dXNlcjpwYXNzd29yZA==""###);
|
||||
assert_eq!(Credentials::from_header_value(&header), Some(credentials));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn authenticated_request_from_url_with_percent_encoded_user() {
|
||||
let url = Url::parse("https://example.com/simple/first/").unwrap();
|
||||
let mut auth_url = url.clone();
|
||||
auth_url.set_username("user@domain").unwrap();
|
||||
auth_url.set_password(Some("password")).unwrap();
|
||||
let credentials = Credentials::from_url(&auth_url).unwrap();
|
||||
|
||||
let mut request = reqwest::Request::new(reqwest::Method::GET, url);
|
||||
request = credentials.authenticate(request);
|
||||
|
||||
let mut header = request
|
||||
.headers()
|
||||
.get(reqwest::header::AUTHORIZATION)
|
||||
.expect("Authorization header should be set")
|
||||
.clone();
|
||||
header.set_sensitive(false);
|
||||
|
||||
assert_debug_snapshot!(header, @r###""Basic dXNlckBkb21haW46cGFzc3dvcmQ=""###);
|
||||
assert_eq!(Credentials::from_header_value(&header), Some(credentials));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn authenticated_request_from_url_with_percent_encoded_password() {
|
||||
let url = Url::parse("https://example.com/simple/first/").unwrap();
|
||||
let mut auth_url = url.clone();
|
||||
auth_url.set_username("user").unwrap();
|
||||
auth_url.set_password(Some("password==")).unwrap();
|
||||
let credentials = Credentials::from_url(&auth_url).unwrap();
|
||||
|
||||
let mut request = reqwest::Request::new(reqwest::Method::GET, url);
|
||||
request = credentials.authenticate(request);
|
||||
|
||||
let mut header = request
|
||||
.headers()
|
||||
.get(reqwest::header::AUTHORIZATION)
|
||||
.expect("Authorization header should be set")
|
||||
.clone();
|
||||
header.set_sensitive(false);
|
||||
|
||||
assert_debug_snapshot!(header, @r###""Basic dXNlcjpwYXNzd29yZD09""###);
|
||||
assert_eq!(Credentials::from_header_value(&header), Some(credentials));
|
||||
}
|
|
@ -151,133 +151,4 @@ impl KeyringProvider {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use futures::FutureExt;
|
||||
|
||||
#[tokio::test]
|
||||
async fn fetch_url_no_host() {
|
||||
let url = Url::parse("file:/etc/bin/").unwrap();
|
||||
let keyring = KeyringProvider::empty();
|
||||
// Panics due to debug assertion; returns `None` in production
|
||||
let result = std::panic::AssertUnwindSafe(keyring.fetch(&url, "user"))
|
||||
.catch_unwind()
|
||||
.await;
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn fetch_url_with_password() {
|
||||
let url = Url::parse("https://user:password@example.com").unwrap();
|
||||
let keyring = KeyringProvider::empty();
|
||||
// Panics due to debug assertion; returns `None` in production
|
||||
let result = std::panic::AssertUnwindSafe(keyring.fetch(&url, url.username()))
|
||||
.catch_unwind()
|
||||
.await;
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn fetch_url_with_no_username() {
|
||||
let url = Url::parse("https://example.com").unwrap();
|
||||
let keyring = KeyringProvider::empty();
|
||||
// Panics due to debug assertion; returns `None` in production
|
||||
let result = std::panic::AssertUnwindSafe(keyring.fetch(&url, url.username()))
|
||||
.catch_unwind()
|
||||
.await;
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn fetch_url_no_auth() {
|
||||
let url = Url::parse("https://example.com").unwrap();
|
||||
let keyring = KeyringProvider::empty();
|
||||
let credentials = keyring.fetch(&url, "user");
|
||||
assert!(credentials.await.is_none());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn fetch_url() {
|
||||
let url = Url::parse("https://example.com").unwrap();
|
||||
let keyring = KeyringProvider::dummy([((url.host_str().unwrap(), "user"), "password")]);
|
||||
assert_eq!(
|
||||
keyring.fetch(&url, "user").await,
|
||||
Some(Credentials::new(
|
||||
Some("user".to_string()),
|
||||
Some("password".to_string())
|
||||
))
|
||||
);
|
||||
assert_eq!(
|
||||
keyring.fetch(&url.join("test").unwrap(), "user").await,
|
||||
Some(Credentials::new(
|
||||
Some("user".to_string()),
|
||||
Some("password".to_string())
|
||||
))
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn fetch_url_no_match() {
|
||||
let url = Url::parse("https://example.com").unwrap();
|
||||
let keyring = KeyringProvider::dummy([(("other.com", "user"), "password")]);
|
||||
let credentials = keyring.fetch(&url, "user").await;
|
||||
assert_eq!(credentials, None);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn fetch_url_prefers_url_to_host() {
|
||||
let url = Url::parse("https://example.com/").unwrap();
|
||||
let keyring = KeyringProvider::dummy([
|
||||
((url.join("foo").unwrap().as_str(), "user"), "password"),
|
||||
((url.host_str().unwrap(), "user"), "other-password"),
|
||||
]);
|
||||
assert_eq!(
|
||||
keyring.fetch(&url.join("foo").unwrap(), "user").await,
|
||||
Some(Credentials::new(
|
||||
Some("user".to_string()),
|
||||
Some("password".to_string())
|
||||
))
|
||||
);
|
||||
assert_eq!(
|
||||
keyring.fetch(&url, "user").await,
|
||||
Some(Credentials::new(
|
||||
Some("user".to_string()),
|
||||
Some("other-password".to_string())
|
||||
))
|
||||
);
|
||||
assert_eq!(
|
||||
keyring.fetch(&url.join("bar").unwrap(), "user").await,
|
||||
Some(Credentials::new(
|
||||
Some("user".to_string()),
|
||||
Some("other-password".to_string())
|
||||
))
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn fetch_url_username() {
|
||||
let url = Url::parse("https://example.com").unwrap();
|
||||
let keyring = KeyringProvider::dummy([((url.host_str().unwrap(), "user"), "password")]);
|
||||
let credentials = keyring.fetch(&url, "user").await;
|
||||
assert_eq!(
|
||||
credentials,
|
||||
Some(Credentials::new(
|
||||
Some("user".to_string()),
|
||||
Some("password".to_string())
|
||||
))
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn fetch_url_username_no_match() {
|
||||
let url = Url::parse("https://example.com").unwrap();
|
||||
let keyring = KeyringProvider::dummy([((url.host_str().unwrap(), "foo"), "password")]);
|
||||
let credentials = keyring.fetch(&url, "bar").await;
|
||||
assert_eq!(credentials, None);
|
||||
|
||||
// Still fails if we have `foo` in the URL itself
|
||||
let url = Url::parse("https://foo@example.com").unwrap();
|
||||
let credentials = keyring.fetch(&url, "bar").await;
|
||||
assert_eq!(credentials, None);
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
128
crates/uv-auth/src/keyring/tests.rs
Normal file
128
crates/uv-auth/src/keyring/tests.rs
Normal file
|
@ -0,0 +1,128 @@
|
|||
use super::*;
|
||||
use futures::FutureExt;
|
||||
|
||||
#[tokio::test]
|
||||
async fn fetch_url_no_host() {
|
||||
let url = Url::parse("file:/etc/bin/").unwrap();
|
||||
let keyring = KeyringProvider::empty();
|
||||
// Panics due to debug assertion; returns `None` in production
|
||||
let result = std::panic::AssertUnwindSafe(keyring.fetch(&url, "user"))
|
||||
.catch_unwind()
|
||||
.await;
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn fetch_url_with_password() {
|
||||
let url = Url::parse("https://user:password@example.com").unwrap();
|
||||
let keyring = KeyringProvider::empty();
|
||||
// Panics due to debug assertion; returns `None` in production
|
||||
let result = std::panic::AssertUnwindSafe(keyring.fetch(&url, url.username()))
|
||||
.catch_unwind()
|
||||
.await;
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn fetch_url_with_no_username() {
|
||||
let url = Url::parse("https://example.com").unwrap();
|
||||
let keyring = KeyringProvider::empty();
|
||||
// Panics due to debug assertion; returns `None` in production
|
||||
let result = std::panic::AssertUnwindSafe(keyring.fetch(&url, url.username()))
|
||||
.catch_unwind()
|
||||
.await;
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn fetch_url_no_auth() {
|
||||
let url = Url::parse("https://example.com").unwrap();
|
||||
let keyring = KeyringProvider::empty();
|
||||
let credentials = keyring.fetch(&url, "user");
|
||||
assert!(credentials.await.is_none());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn fetch_url() {
|
||||
let url = Url::parse("https://example.com").unwrap();
|
||||
let keyring = KeyringProvider::dummy([((url.host_str().unwrap(), "user"), "password")]);
|
||||
assert_eq!(
|
||||
keyring.fetch(&url, "user").await,
|
||||
Some(Credentials::new(
|
||||
Some("user".to_string()),
|
||||
Some("password".to_string())
|
||||
))
|
||||
);
|
||||
assert_eq!(
|
||||
keyring.fetch(&url.join("test").unwrap(), "user").await,
|
||||
Some(Credentials::new(
|
||||
Some("user".to_string()),
|
||||
Some("password".to_string())
|
||||
))
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn fetch_url_no_match() {
|
||||
let url = Url::parse("https://example.com").unwrap();
|
||||
let keyring = KeyringProvider::dummy([(("other.com", "user"), "password")]);
|
||||
let credentials = keyring.fetch(&url, "user").await;
|
||||
assert_eq!(credentials, None);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn fetch_url_prefers_url_to_host() {
|
||||
let url = Url::parse("https://example.com/").unwrap();
|
||||
let keyring = KeyringProvider::dummy([
|
||||
((url.join("foo").unwrap().as_str(), "user"), "password"),
|
||||
((url.host_str().unwrap(), "user"), "other-password"),
|
||||
]);
|
||||
assert_eq!(
|
||||
keyring.fetch(&url.join("foo").unwrap(), "user").await,
|
||||
Some(Credentials::new(
|
||||
Some("user".to_string()),
|
||||
Some("password".to_string())
|
||||
))
|
||||
);
|
||||
assert_eq!(
|
||||
keyring.fetch(&url, "user").await,
|
||||
Some(Credentials::new(
|
||||
Some("user".to_string()),
|
||||
Some("other-password".to_string())
|
||||
))
|
||||
);
|
||||
assert_eq!(
|
||||
keyring.fetch(&url.join("bar").unwrap(), "user").await,
|
||||
Some(Credentials::new(
|
||||
Some("user".to_string()),
|
||||
Some("other-password".to_string())
|
||||
))
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn fetch_url_username() {
|
||||
let url = Url::parse("https://example.com").unwrap();
|
||||
let keyring = KeyringProvider::dummy([((url.host_str().unwrap(), "user"), "password")]);
|
||||
let credentials = keyring.fetch(&url, "user").await;
|
||||
assert_eq!(
|
||||
credentials,
|
||||
Some(Credentials::new(
|
||||
Some("user".to_string()),
|
||||
Some("password".to_string())
|
||||
))
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn fetch_url_username_no_match() {
|
||||
let url = Url::parse("https://example.com").unwrap();
|
||||
let keyring = KeyringProvider::dummy([((url.host_str().unwrap(), "foo"), "password")]);
|
||||
let credentials = keyring.fetch(&url, "bar").await;
|
||||
assert_eq!(credentials, None);
|
||||
|
||||
// Still fails if we have `foo` in the URL itself
|
||||
let url = Url::parse("https://foo@example.com").unwrap();
|
||||
let credentials = keyring.fetch(&url, "bar").await;
|
||||
assert_eq!(credentials, None);
|
||||
}
|
File diff suppressed because it is too large
Load diff
1079
crates/uv-auth/src/middleware/tests.rs
Normal file
1079
crates/uv-auth/src/middleware/tests.rs
Normal file
File diff suppressed because it is too large
Load diff
|
@ -59,89 +59,4 @@ impl Display for Realm {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use url::{ParseError, Url};
|
||||
|
||||
use crate::Realm;
|
||||
|
||||
#[test]
|
||||
fn test_should_retain_auth() -> Result<(), ParseError> {
|
||||
// Exact match (https)
|
||||
assert_eq!(
|
||||
Realm::from(&Url::parse("https://example.com")?),
|
||||
Realm::from(&Url::parse("https://example.com")?)
|
||||
);
|
||||
|
||||
// Exact match (with port)
|
||||
assert_eq!(
|
||||
Realm::from(&Url::parse("https://example.com:1234")?),
|
||||
Realm::from(&Url::parse("https://example.com:1234")?)
|
||||
);
|
||||
|
||||
// Exact match (http)
|
||||
assert_eq!(
|
||||
Realm::from(&Url::parse("http://example.com")?),
|
||||
Realm::from(&Url::parse("http://example.com")?)
|
||||
);
|
||||
|
||||
// Okay, path differs
|
||||
assert_eq!(
|
||||
Realm::from(&Url::parse("http://example.com/foo")?),
|
||||
Realm::from(&Url::parse("http://example.com/bar")?)
|
||||
);
|
||||
|
||||
// Okay, default port differs (https)
|
||||
assert_eq!(
|
||||
Realm::from(&Url::parse("https://example.com:443")?),
|
||||
Realm::from(&Url::parse("https://example.com")?)
|
||||
);
|
||||
|
||||
// Okay, default port differs (http)
|
||||
assert_eq!(
|
||||
Realm::from(&Url::parse("http://example.com:80")?),
|
||||
Realm::from(&Url::parse("http://example.com")?)
|
||||
);
|
||||
|
||||
// Mismatched scheme
|
||||
assert_ne!(
|
||||
Realm::from(&Url::parse("https://example.com")?),
|
||||
Realm::from(&Url::parse("http://example.com")?)
|
||||
);
|
||||
|
||||
// Mismatched scheme, we explicitly do not allow upgrade to https
|
||||
assert_ne!(
|
||||
Realm::from(&Url::parse("http://example.com")?),
|
||||
Realm::from(&Url::parse("https://example.com")?)
|
||||
);
|
||||
|
||||
// Mismatched host
|
||||
assert_ne!(
|
||||
Realm::from(&Url::parse("https://foo.com")?),
|
||||
Realm::from(&Url::parse("https://bar.com")?)
|
||||
);
|
||||
|
||||
// Mismatched port
|
||||
assert_ne!(
|
||||
Realm::from(&Url::parse("https://example.com:1234")?),
|
||||
Realm::from(&Url::parse("https://example.com:5678")?)
|
||||
);
|
||||
|
||||
// Mismatched port, with one as default for scheme
|
||||
assert_ne!(
|
||||
Realm::from(&Url::parse("https://example.com:443")?),
|
||||
Realm::from(&Url::parse("https://example.com:5678")?)
|
||||
);
|
||||
assert_ne!(
|
||||
Realm::from(&Url::parse("https://example.com:1234")?),
|
||||
Realm::from(&Url::parse("https://example.com:443")?)
|
||||
);
|
||||
|
||||
// Mismatched port, with default for a different scheme
|
||||
assert_ne!(
|
||||
Realm::from(&Url::parse("https://example.com:80")?),
|
||||
Realm::from(&Url::parse("https://example.com")?)
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
84
crates/uv-auth/src/realm/tests.rs
Normal file
84
crates/uv-auth/src/realm/tests.rs
Normal file
|
@ -0,0 +1,84 @@
|
|||
use url::{ParseError, Url};
|
||||
|
||||
use crate::Realm;
|
||||
|
||||
#[test]
|
||||
fn test_should_retain_auth() -> Result<(), ParseError> {
|
||||
// Exact match (https)
|
||||
assert_eq!(
|
||||
Realm::from(&Url::parse("https://example.com")?),
|
||||
Realm::from(&Url::parse("https://example.com")?)
|
||||
);
|
||||
|
||||
// Exact match (with port)
|
||||
assert_eq!(
|
||||
Realm::from(&Url::parse("https://example.com:1234")?),
|
||||
Realm::from(&Url::parse("https://example.com:1234")?)
|
||||
);
|
||||
|
||||
// Exact match (http)
|
||||
assert_eq!(
|
||||
Realm::from(&Url::parse("http://example.com")?),
|
||||
Realm::from(&Url::parse("http://example.com")?)
|
||||
);
|
||||
|
||||
// Okay, path differs
|
||||
assert_eq!(
|
||||
Realm::from(&Url::parse("http://example.com/foo")?),
|
||||
Realm::from(&Url::parse("http://example.com/bar")?)
|
||||
);
|
||||
|
||||
// Okay, default port differs (https)
|
||||
assert_eq!(
|
||||
Realm::from(&Url::parse("https://example.com:443")?),
|
||||
Realm::from(&Url::parse("https://example.com")?)
|
||||
);
|
||||
|
||||
// Okay, default port differs (http)
|
||||
assert_eq!(
|
||||
Realm::from(&Url::parse("http://example.com:80")?),
|
||||
Realm::from(&Url::parse("http://example.com")?)
|
||||
);
|
||||
|
||||
// Mismatched scheme
|
||||
assert_ne!(
|
||||
Realm::from(&Url::parse("https://example.com")?),
|
||||
Realm::from(&Url::parse("http://example.com")?)
|
||||
);
|
||||
|
||||
// Mismatched scheme, we explicitly do not allow upgrade to https
|
||||
assert_ne!(
|
||||
Realm::from(&Url::parse("http://example.com")?),
|
||||
Realm::from(&Url::parse("https://example.com")?)
|
||||
);
|
||||
|
||||
// Mismatched host
|
||||
assert_ne!(
|
||||
Realm::from(&Url::parse("https://foo.com")?),
|
||||
Realm::from(&Url::parse("https://bar.com")?)
|
||||
);
|
||||
|
||||
// Mismatched port
|
||||
assert_ne!(
|
||||
Realm::from(&Url::parse("https://example.com:1234")?),
|
||||
Realm::from(&Url::parse("https://example.com:5678")?)
|
||||
);
|
||||
|
||||
// Mismatched port, with one as default for scheme
|
||||
assert_ne!(
|
||||
Realm::from(&Url::parse("https://example.com:443")?),
|
||||
Realm::from(&Url::parse("https://example.com:5678")?)
|
||||
);
|
||||
assert_ne!(
|
||||
Realm::from(&Url::parse("https://example.com:1234")?),
|
||||
Realm::from(&Url::parse("https://example.com:443")?)
|
||||
);
|
||||
|
||||
// Mismatched port, with default for a different scheme
|
||||
assert_ne!(
|
||||
Realm::from(&Url::parse("https://example.com:80")?),
|
||||
Realm::from(&Url::parse("https://example.com")?)
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -15,6 +15,7 @@ license = { workspace = true }
|
|||
workspace = true
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
bench = false
|
||||
|
||||
[[bench]]
|
||||
|
|
|
@ -9,6 +9,9 @@ repository.workspace = true
|
|||
authors.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
uv-distribution-filename = { workspace = true }
|
||||
uv-fs = { workspace = true }
|
||||
|
|
|
@ -498,142 +498,4 @@ fn write_record(
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use insta::{assert_snapshot, with_settings};
|
||||
use std::str::FromStr;
|
||||
use tempfile::TempDir;
|
||||
use uv_normalize::PackageName;
|
||||
use uv_pep440::Version;
|
||||
|
||||
#[test]
|
||||
fn test_wheel() {
|
||||
let filename = WheelFilename {
|
||||
name: PackageName::from_str("foo").unwrap(),
|
||||
version: Version::from_str("1.2.3").unwrap(),
|
||||
build_tag: None,
|
||||
python_tag: vec!["py2".to_string(), "py3".to_string()],
|
||||
abi_tag: vec!["none".to_string()],
|
||||
platform_tag: vec!["any".to_string()],
|
||||
};
|
||||
|
||||
with_settings!({
|
||||
filters => [(uv_version::version(), "[VERSION]")],
|
||||
}, {
|
||||
assert_snapshot!(wheel_info(&filename), @r"
|
||||
Wheel-Version: 1.0
|
||||
Generator: uv [VERSION]
|
||||
Root-Is-Purelib: true
|
||||
Tag: py2-none-any
|
||||
Tag: py3-none-any
|
||||
");
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_record() {
|
||||
let record = vec![RecordEntry {
|
||||
path: "uv_backend/__init__.py".to_string(),
|
||||
hash: "89f869e53a3a0061a52c0233e6442d4d72de80a8a2d3406d9ea0bfd397ed7865".to_string(),
|
||||
size: 37,
|
||||
}];
|
||||
|
||||
let mut writer = Vec::new();
|
||||
write_record(&mut writer, "uv_backend-0.1.0", record).unwrap();
|
||||
assert_snapshot!(String::from_utf8(writer).unwrap(), @r"
|
||||
uv_backend/__init__.py,sha256=89f869e53a3a0061a52c0233e6442d4d72de80a8a2d3406d9ea0bfd397ed7865,37
|
||||
uv_backend-0.1.0/RECORD,,
|
||||
");
|
||||
}
|
||||
|
||||
/// Check that we write deterministic wheels.
|
||||
#[test]
|
||||
fn test_determinism() {
|
||||
let temp1 = TempDir::new().unwrap();
|
||||
let uv_backend = Path::new("../../scripts/packages/uv_backend");
|
||||
build(uv_backend, temp1.path(), None).unwrap();
|
||||
|
||||
// Touch the file to check that we don't serialize the last modified date.
|
||||
fs_err::write(
|
||||
uv_backend.join("src/uv_backend/__init__.py"),
|
||||
"def greet():\n print(\"Hello 👋\")\n",
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let temp2 = TempDir::new().unwrap();
|
||||
build(uv_backend, temp2.path(), None).unwrap();
|
||||
|
||||
let wheel_filename = "uv_backend-0.1.0-py3-none-any.whl";
|
||||
assert_eq!(
|
||||
fs_err::read(temp1.path().join(wheel_filename)).unwrap(),
|
||||
fs_err::read(temp2.path().join(wheel_filename)).unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
/// Snapshot all files from the prepare metadata hook.
|
||||
#[test]
|
||||
fn test_prepare_metadata() {
|
||||
let metadata_dir = TempDir::new().unwrap();
|
||||
let uv_backend = Path::new("../../scripts/packages/uv_backend");
|
||||
metadata(uv_backend, metadata_dir.path()).unwrap();
|
||||
|
||||
let mut files: Vec<_> = WalkDir::new(metadata_dir.path())
|
||||
.into_iter()
|
||||
.map(|entry| {
|
||||
entry
|
||||
.unwrap()
|
||||
.path()
|
||||
.strip_prefix(metadata_dir.path())
|
||||
.unwrap()
|
||||
.portable_display()
|
||||
.to_string()
|
||||
})
|
||||
.filter(|path| !path.is_empty())
|
||||
.collect();
|
||||
files.sort();
|
||||
assert_snapshot!(files.join("\n"), @r"
|
||||
uv_backend-0.1.0.dist-info
|
||||
uv_backend-0.1.0.dist-info/METADATA
|
||||
uv_backend-0.1.0.dist-info/RECORD
|
||||
uv_backend-0.1.0.dist-info/WHEEL
|
||||
");
|
||||
|
||||
let metadata_file = metadata_dir
|
||||
.path()
|
||||
.join("uv_backend-0.1.0.dist-info/METADATA");
|
||||
assert_snapshot!(fs_err::read_to_string(metadata_file).unwrap(), @r###"
|
||||
Metadata-Version: 2.3
|
||||
Name: uv-backend
|
||||
Version: 0.1.0
|
||||
Summary: Add your description here
|
||||
Requires-Python: >=3.12
|
||||
Description-Content-Type: text/markdown
|
||||
|
||||
# uv_backend
|
||||
|
||||
A simple package to be built with the uv build backend.
|
||||
"###);
|
||||
|
||||
let record_file = metadata_dir
|
||||
.path()
|
||||
.join("uv_backend-0.1.0.dist-info/RECORD");
|
||||
assert_snapshot!(fs_err::read_to_string(record_file).unwrap(), @r###"
|
||||
uv_backend-0.1.0.dist-info/WHEEL,sha256=70ce44709b6a53e0d0c5a6755b0290179697020f1f867e794f26154fe4825738,79
|
||||
uv_backend-0.1.0.dist-info/METADATA,sha256=e4a0d390317d7182f65ea978254c71ed283e0a4242150cf1c99a694b113ff68d,224
|
||||
uv_backend-0.1.0.dist-info/RECORD,,
|
||||
"###);
|
||||
|
||||
let wheel_file = metadata_dir.path().join("uv_backend-0.1.0.dist-info/WHEEL");
|
||||
let filters = vec![(uv_version::version(), "[VERSION]")];
|
||||
with_settings!({
|
||||
filters => filters
|
||||
}, {
|
||||
assert_snapshot!(fs_err::read_to_string(wheel_file).unwrap(), @r###"
|
||||
Wheel-Version: 1.0
|
||||
Generator: uv [VERSION]
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
"###);
|
||||
});
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
|
@ -629,406 +629,4 @@ struct BuildSystem {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use indoc::{formatdoc, indoc};
|
||||
use insta::assert_snapshot;
|
||||
use std::iter;
|
||||
use tempfile::TempDir;
|
||||
|
||||
fn extend_project(payload: &str) -> String {
|
||||
formatdoc! {r#"
|
||||
[project]
|
||||
name = "hello-world"
|
||||
version = "0.1.0"
|
||||
{payload}
|
||||
|
||||
[build-system]
|
||||
requires = ["uv>=0.4.15,<5"]
|
||||
build-backend = "uv"
|
||||
"#
|
||||
}
|
||||
}
|
||||
|
||||
fn format_err(err: impl std::error::Error) -> String {
|
||||
let mut formatted = err.to_string();
|
||||
for source in iter::successors(err.source(), |&err| err.source()) {
|
||||
formatted += &format!("\n Caused by: {source}");
|
||||
}
|
||||
formatted
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn valid() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
|
||||
fs_err::write(
|
||||
temp_dir.path().join("Readme.md"),
|
||||
indoc! {r"
|
||||
# Foo
|
||||
|
||||
This is the foo library.
|
||||
"},
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
fs_err::write(
|
||||
temp_dir.path().join("License.txt"),
|
||||
indoc! {r#"
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
|
||||
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
|
||||
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
|
||||
OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
"#},
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let contents = indoc! {r#"
|
||||
# See https://github.com/pypa/sampleproject/blob/main/pyproject.toml for another example
|
||||
|
||||
[project]
|
||||
name = "hello-world"
|
||||
version = "0.1.0"
|
||||
description = "A Python package"
|
||||
readme = "Readme.md"
|
||||
requires_python = ">=3.12"
|
||||
license = { file = "License.txt" }
|
||||
authors = [{ name = "Ferris the crab", email = "ferris@rustacean.net" }]
|
||||
maintainers = [{ name = "Konsti", email = "konstin@mailbox.org" }]
|
||||
keywords = ["demo", "example", "package"]
|
||||
classifiers = [
|
||||
"Development Status :: 6 - Mature",
|
||||
"License :: OSI Approved :: MIT License",
|
||||
# https://github.com/pypa/trove-classifiers/issues/17
|
||||
"License :: OSI Approved :: Apache Software License",
|
||||
"Programming Language :: Python",
|
||||
]
|
||||
dependencies = ["flask>=3,<4", "sqlalchemy[asyncio]>=2.0.35,<3"]
|
||||
# We don't support dynamic fields, the default empty array is the only allowed value.
|
||||
dynamic = []
|
||||
|
||||
[project.optional-dependencies]
|
||||
postgres = ["psycopg>=3.2.2,<4"]
|
||||
mysql = ["pymysql>=1.1.1,<2"]
|
||||
|
||||
[project.urls]
|
||||
"Homepage" = "https://github.com/astral-sh/uv"
|
||||
"Repository" = "https://astral.sh"
|
||||
|
||||
[project.scripts]
|
||||
foo = "foo.cli:__main__"
|
||||
|
||||
[project.gui-scripts]
|
||||
foo-gui = "foo.gui"
|
||||
|
||||
[project.entry-points.bar_group]
|
||||
foo-bar = "foo:bar"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv>=0.4.15,<5"]
|
||||
build-backend = "uv"
|
||||
"#
|
||||
};
|
||||
|
||||
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
|
||||
let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();
|
||||
|
||||
assert_snapshot!(metadata.core_metadata_format(), @r###"
|
||||
Metadata-Version: 2.3
|
||||
Name: hello-world
|
||||
Version: 0.1.0
|
||||
Summary: A Python package
|
||||
Keywords: demo,example,package
|
||||
Author: Ferris the crab
|
||||
License: THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
|
||||
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
|
||||
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
|
||||
OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
Classifier: Development Status :: 6 - Mature
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Classifier: License :: OSI Approved :: Apache Software License
|
||||
Classifier: Programming Language :: Python
|
||||
Requires-Dist: flask>=3,<4
|
||||
Requires-Dist: sqlalchemy[asyncio]>=2.0.35,<3
|
||||
Maintainer: Konsti
|
||||
Project-URL: Homepage, https://github.com/astral-sh/uv
|
||||
Project-URL: Repository, https://astral.sh
|
||||
Provides-Extra: mysql
|
||||
Provides-Extra: postgres
|
||||
Description-Content-Type: text/markdown
|
||||
|
||||
# Foo
|
||||
|
||||
This is the foo library.
|
||||
"###);
|
||||
|
||||
assert_snapshot!(pyproject_toml.to_entry_points().unwrap().unwrap(), @r###"
|
||||
[console_scripts]
|
||||
foo = foo.cli:__main__
|
||||
|
||||
[gui_scripts]
|
||||
foo-gui = foo.gui
|
||||
|
||||
[bar_group]
|
||||
foo-bar = foo:bar
|
||||
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_system_valid() {
|
||||
let contents = extend_project("");
|
||||
let pyproject_toml = PyProjectToml::parse(&contents).unwrap();
|
||||
assert!(pyproject_toml.check_build_system());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_system_no_bound() {
|
||||
let contents = indoc! {r#"
|
||||
[project]
|
||||
name = "hello-world"
|
||||
version = "0.1.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv"]
|
||||
build-backend = "uv"
|
||||
"#};
|
||||
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
|
||||
assert!(!pyproject_toml.check_build_system());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_system_multiple_packages() {
|
||||
let contents = indoc! {r#"
|
||||
[project]
|
||||
name = "hello-world"
|
||||
version = "0.1.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv>=0.4.15,<5", "wheel"]
|
||||
build-backend = "uv"
|
||||
"#};
|
||||
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
|
||||
assert!(!pyproject_toml.check_build_system());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_system_no_requires_uv() {
|
||||
let contents = indoc! {r#"
|
||||
[project]
|
||||
name = "hello-world"
|
||||
version = "0.1.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["setuptools"]
|
||||
build-backend = "uv"
|
||||
"#};
|
||||
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
|
||||
assert!(!pyproject_toml.check_build_system());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_system_not_uv() {
|
||||
let contents = indoc! {r#"
|
||||
[project]
|
||||
name = "hello-world"
|
||||
version = "0.1.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv>=0.4.15,<5"]
|
||||
build-backend = "setuptools"
|
||||
"#};
|
||||
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
|
||||
assert!(!pyproject_toml.check_build_system());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn minimal() {
|
||||
let contents = extend_project("");
|
||||
|
||||
let metadata = PyProjectToml::parse(&contents)
|
||||
.unwrap()
|
||||
.to_metadata(Path::new("/do/not/read"))
|
||||
.unwrap();
|
||||
|
||||
assert_snapshot!(metadata.core_metadata_format(), @r###"
|
||||
Metadata-Version: 2.3
|
||||
Name: hello-world
|
||||
Version: 0.1.0
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_readme_spec() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
readme = { path = "Readme.md" }
|
||||
"#
|
||||
});
|
||||
|
||||
let err = PyProjectToml::parse(&contents).unwrap_err();
|
||||
assert_snapshot!(format_err(err), @r###"
|
||||
Invalid pyproject.toml
|
||||
Caused by: TOML parse error at line 4, column 10
|
||||
|
|
||||
4 | readme = { path = "Readme.md" }
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^
|
||||
data did not match any variant of untagged enum Readme
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn missing_readme() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
readme = "Readme.md"
|
||||
"#
|
||||
});
|
||||
|
||||
let err = PyProjectToml::parse(&contents)
|
||||
.unwrap()
|
||||
.to_metadata(Path::new("/do/not/read"))
|
||||
.unwrap_err();
|
||||
// Simplified for windows compatibility.
|
||||
assert_snapshot!(err.to_string().replace('\\', "/"), @"failed to open file `/do/not/read/Readme.md`");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multiline_description() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
description = "Hi :)\nThis is my project"
|
||||
"#
|
||||
});
|
||||
|
||||
let err = PyProjectToml::parse(&contents)
|
||||
.unwrap()
|
||||
.to_metadata(Path::new("/do/not/read"))
|
||||
.unwrap_err();
|
||||
assert_snapshot!(format_err(err), @r###"
|
||||
Invalid pyproject.toml
|
||||
Caused by: `project.description` must be a single line
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn mixed_licenses() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
license-files = ["licenses/*"]
|
||||
license = { text = "MIT" }
|
||||
"#
|
||||
});
|
||||
|
||||
let err = PyProjectToml::parse(&contents)
|
||||
.unwrap()
|
||||
.to_metadata(Path::new("/do/not/read"))
|
||||
.unwrap_err();
|
||||
assert_snapshot!(format_err(err), @r###"
|
||||
Invalid pyproject.toml
|
||||
Caused by: When `project.license-files` is defined, `project.license` must be an SPDX expression string
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn valid_license() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
license = "MIT OR Apache-2.0"
|
||||
"#
|
||||
});
|
||||
let metadata = PyProjectToml::parse(&contents)
|
||||
.unwrap()
|
||||
.to_metadata(Path::new("/do/not/read"))
|
||||
.unwrap();
|
||||
assert_snapshot!(metadata.core_metadata_format(), @r###"
|
||||
Metadata-Version: 2.4
|
||||
Name: hello-world
|
||||
Version: 0.1.0
|
||||
License-Expression: MIT OR Apache-2.0
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_license() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
license = "MIT XOR Apache-2"
|
||||
"#
|
||||
});
|
||||
let err = PyProjectToml::parse(&contents)
|
||||
.unwrap()
|
||||
.to_metadata(Path::new("/do/not/read"))
|
||||
.unwrap_err();
|
||||
// TODO(konsti): We mess up the indentation in the error.
|
||||
assert_snapshot!(format_err(err), @r###"
|
||||
Invalid pyproject.toml
|
||||
Caused by: `project.license` is not a valid SPDX expression: `MIT XOR Apache-2`
|
||||
Caused by: MIT XOR Apache-2
|
||||
^^^ unknown term
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dynamic() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
dynamic = ["dependencies"]
|
||||
"#
|
||||
});
|
||||
|
||||
let err = PyProjectToml::parse(&contents)
|
||||
.unwrap()
|
||||
.to_metadata(Path::new("/do/not/read"))
|
||||
.unwrap_err();
|
||||
assert_snapshot!(format_err(err), @r###"
|
||||
Invalid pyproject.toml
|
||||
Caused by: Dynamic metadata is not supported
|
||||
"###);
|
||||
}
|
||||
|
||||
fn script_error(contents: &str) -> String {
|
||||
let err = PyProjectToml::parse(contents)
|
||||
.unwrap()
|
||||
.to_entry_points()
|
||||
.unwrap_err();
|
||||
format_err(err)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_entry_point_group() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
[project.entry-points."a@b"]
|
||||
foo = "bar"
|
||||
"#
|
||||
});
|
||||
assert_snapshot!(script_error(&contents), @"Entrypoint groups must consist of letters and numbers separated by dots, invalid group: `a@b`");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_entry_point_name() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
[project.scripts]
|
||||
"a@b" = "bar"
|
||||
"#
|
||||
});
|
||||
assert_snapshot!(script_error(&contents), @"Entrypoint names must consist of letters, numbers, dots and dashes; invalid name: `a@b`");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_entry_point_conflict_scripts() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
[project.entry-points.console_scripts]
|
||||
foo = "bar"
|
||||
"#
|
||||
});
|
||||
assert_snapshot!(script_error(&contents), @"Use `project.scripts` instead of `project.entry-points.console_scripts`");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_entry_point_conflict_gui_scripts() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
[project.entry-points.gui_scripts]
|
||||
foo = "bar"
|
||||
"#
|
||||
});
|
||||
assert_snapshot!(script_error(&contents), @"Use `project.gui-scripts` instead of `project.entry-points.gui_scripts`");
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
401
crates/uv-build-backend/src/metadata/tests.rs
Normal file
401
crates/uv-build-backend/src/metadata/tests.rs
Normal file
|
@ -0,0 +1,401 @@
|
|||
use super::*;
|
||||
use indoc::{formatdoc, indoc};
|
||||
use insta::assert_snapshot;
|
||||
use std::iter;
|
||||
use tempfile::TempDir;
|
||||
|
||||
fn extend_project(payload: &str) -> String {
|
||||
formatdoc! {r#"
|
||||
[project]
|
||||
name = "hello-world"
|
||||
version = "0.1.0"
|
||||
{payload}
|
||||
|
||||
[build-system]
|
||||
requires = ["uv>=0.4.15,<5"]
|
||||
build-backend = "uv"
|
||||
"#
|
||||
}
|
||||
}
|
||||
|
||||
fn format_err(err: impl std::error::Error) -> String {
|
||||
let mut formatted = err.to_string();
|
||||
for source in iter::successors(err.source(), |&err| err.source()) {
|
||||
formatted += &format!("\n Caused by: {source}");
|
||||
}
|
||||
formatted
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn valid() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
|
||||
fs_err::write(
|
||||
temp_dir.path().join("Readme.md"),
|
||||
indoc! {r"
|
||||
# Foo
|
||||
|
||||
This is the foo library.
|
||||
"},
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
fs_err::write(
|
||||
temp_dir.path().join("License.txt"),
|
||||
indoc! {r#"
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
|
||||
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
|
||||
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
|
||||
OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
"#},
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let contents = indoc! {r#"
|
||||
# See https://github.com/pypa/sampleproject/blob/main/pyproject.toml for another example
|
||||
|
||||
[project]
|
||||
name = "hello-world"
|
||||
version = "0.1.0"
|
||||
description = "A Python package"
|
||||
readme = "Readme.md"
|
||||
requires_python = ">=3.12"
|
||||
license = { file = "License.txt" }
|
||||
authors = [{ name = "Ferris the crab", email = "ferris@rustacean.net" }]
|
||||
maintainers = [{ name = "Konsti", email = "konstin@mailbox.org" }]
|
||||
keywords = ["demo", "example", "package"]
|
||||
classifiers = [
|
||||
"Development Status :: 6 - Mature",
|
||||
"License :: OSI Approved :: MIT License",
|
||||
# https://github.com/pypa/trove-classifiers/issues/17
|
||||
"License :: OSI Approved :: Apache Software License",
|
||||
"Programming Language :: Python",
|
||||
]
|
||||
dependencies = ["flask>=3,<4", "sqlalchemy[asyncio]>=2.0.35,<3"]
|
||||
# We don't support dynamic fields, the default empty array is the only allowed value.
|
||||
dynamic = []
|
||||
|
||||
[project.optional-dependencies]
|
||||
postgres = ["psycopg>=3.2.2,<4"]
|
||||
mysql = ["pymysql>=1.1.1,<2"]
|
||||
|
||||
[project.urls]
|
||||
"Homepage" = "https://github.com/astral-sh/uv"
|
||||
"Repository" = "https://astral.sh"
|
||||
|
||||
[project.scripts]
|
||||
foo = "foo.cli:__main__"
|
||||
|
||||
[project.gui-scripts]
|
||||
foo-gui = "foo.gui"
|
||||
|
||||
[project.entry-points.bar_group]
|
||||
foo-bar = "foo:bar"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv>=0.4.15,<5"]
|
||||
build-backend = "uv"
|
||||
"#
|
||||
};
|
||||
|
||||
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
|
||||
let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();
|
||||
|
||||
assert_snapshot!(metadata.core_metadata_format(), @r###"
|
||||
Metadata-Version: 2.3
|
||||
Name: hello-world
|
||||
Version: 0.1.0
|
||||
Summary: A Python package
|
||||
Keywords: demo,example,package
|
||||
Author: Ferris the crab
|
||||
License: THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
|
||||
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
|
||||
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
|
||||
OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
Classifier: Development Status :: 6 - Mature
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Classifier: License :: OSI Approved :: Apache Software License
|
||||
Classifier: Programming Language :: Python
|
||||
Requires-Dist: flask>=3,<4
|
||||
Requires-Dist: sqlalchemy[asyncio]>=2.0.35,<3
|
||||
Maintainer: Konsti
|
||||
Project-URL: Homepage, https://github.com/astral-sh/uv
|
||||
Project-URL: Repository, https://astral.sh
|
||||
Provides-Extra: mysql
|
||||
Provides-Extra: postgres
|
||||
Description-Content-Type: text/markdown
|
||||
|
||||
# Foo
|
||||
|
||||
This is the foo library.
|
||||
"###);
|
||||
|
||||
assert_snapshot!(pyproject_toml.to_entry_points().unwrap().unwrap(), @r###"
|
||||
[console_scripts]
|
||||
foo = foo.cli:__main__
|
||||
|
||||
[gui_scripts]
|
||||
foo-gui = foo.gui
|
||||
|
||||
[bar_group]
|
||||
foo-bar = foo:bar
|
||||
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_system_valid() {
|
||||
let contents = extend_project("");
|
||||
let pyproject_toml = PyProjectToml::parse(&contents).unwrap();
|
||||
assert!(pyproject_toml.check_build_system());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_system_no_bound() {
|
||||
let contents = indoc! {r#"
|
||||
[project]
|
||||
name = "hello-world"
|
||||
version = "0.1.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv"]
|
||||
build-backend = "uv"
|
||||
"#};
|
||||
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
|
||||
assert!(!pyproject_toml.check_build_system());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_system_multiple_packages() {
|
||||
let contents = indoc! {r#"
|
||||
[project]
|
||||
name = "hello-world"
|
||||
version = "0.1.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv>=0.4.15,<5", "wheel"]
|
||||
build-backend = "uv"
|
||||
"#};
|
||||
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
|
||||
assert!(!pyproject_toml.check_build_system());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_system_no_requires_uv() {
|
||||
let contents = indoc! {r#"
|
||||
[project]
|
||||
name = "hello-world"
|
||||
version = "0.1.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["setuptools"]
|
||||
build-backend = "uv"
|
||||
"#};
|
||||
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
|
||||
assert!(!pyproject_toml.check_build_system());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_system_not_uv() {
|
||||
let contents = indoc! {r#"
|
||||
[project]
|
||||
name = "hello-world"
|
||||
version = "0.1.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv>=0.4.15,<5"]
|
||||
build-backend = "setuptools"
|
||||
"#};
|
||||
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
|
||||
assert!(!pyproject_toml.check_build_system());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn minimal() {
|
||||
let contents = extend_project("");
|
||||
|
||||
let metadata = PyProjectToml::parse(&contents)
|
||||
.unwrap()
|
||||
.to_metadata(Path::new("/do/not/read"))
|
||||
.unwrap();
|
||||
|
||||
assert_snapshot!(metadata.core_metadata_format(), @r###"
|
||||
Metadata-Version: 2.3
|
||||
Name: hello-world
|
||||
Version: 0.1.0
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_readme_spec() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
readme = { path = "Readme.md" }
|
||||
"#
|
||||
});
|
||||
|
||||
let err = PyProjectToml::parse(&contents).unwrap_err();
|
||||
assert_snapshot!(format_err(err), @r###"
|
||||
Invalid pyproject.toml
|
||||
Caused by: TOML parse error at line 4, column 10
|
||||
|
|
||||
4 | readme = { path = "Readme.md" }
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^
|
||||
data did not match any variant of untagged enum Readme
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn missing_readme() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
readme = "Readme.md"
|
||||
"#
|
||||
});
|
||||
|
||||
let err = PyProjectToml::parse(&contents)
|
||||
.unwrap()
|
||||
.to_metadata(Path::new("/do/not/read"))
|
||||
.unwrap_err();
|
||||
// Simplified for windows compatibility.
|
||||
assert_snapshot!(err.to_string().replace('\\', "/"), @"failed to open file `/do/not/read/Readme.md`");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multiline_description() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
description = "Hi :)\nThis is my project"
|
||||
"#
|
||||
});
|
||||
|
||||
let err = PyProjectToml::parse(&contents)
|
||||
.unwrap()
|
||||
.to_metadata(Path::new("/do/not/read"))
|
||||
.unwrap_err();
|
||||
assert_snapshot!(format_err(err), @r###"
|
||||
Invalid pyproject.toml
|
||||
Caused by: `project.description` must be a single line
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn mixed_licenses() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
license-files = ["licenses/*"]
|
||||
license = { text = "MIT" }
|
||||
"#
|
||||
});
|
||||
|
||||
let err = PyProjectToml::parse(&contents)
|
||||
.unwrap()
|
||||
.to_metadata(Path::new("/do/not/read"))
|
||||
.unwrap_err();
|
||||
assert_snapshot!(format_err(err), @r###"
|
||||
Invalid pyproject.toml
|
||||
Caused by: When `project.license-files` is defined, `project.license` must be an SPDX expression string
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn valid_license() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
license = "MIT OR Apache-2.0"
|
||||
"#
|
||||
});
|
||||
let metadata = PyProjectToml::parse(&contents)
|
||||
.unwrap()
|
||||
.to_metadata(Path::new("/do/not/read"))
|
||||
.unwrap();
|
||||
assert_snapshot!(metadata.core_metadata_format(), @r###"
|
||||
Metadata-Version: 2.4
|
||||
Name: hello-world
|
||||
Version: 0.1.0
|
||||
License-Expression: MIT OR Apache-2.0
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_license() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
license = "MIT XOR Apache-2"
|
||||
"#
|
||||
});
|
||||
let err = PyProjectToml::parse(&contents)
|
||||
.unwrap()
|
||||
.to_metadata(Path::new("/do/not/read"))
|
||||
.unwrap_err();
|
||||
// TODO(konsti): We mess up the indentation in the error.
|
||||
assert_snapshot!(format_err(err), @r###"
|
||||
Invalid pyproject.toml
|
||||
Caused by: `project.license` is not a valid SPDX expression: `MIT XOR Apache-2`
|
||||
Caused by: MIT XOR Apache-2
|
||||
^^^ unknown term
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dynamic() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
dynamic = ["dependencies"]
|
||||
"#
|
||||
});
|
||||
|
||||
let err = PyProjectToml::parse(&contents)
|
||||
.unwrap()
|
||||
.to_metadata(Path::new("/do/not/read"))
|
||||
.unwrap_err();
|
||||
assert_snapshot!(format_err(err), @r###"
|
||||
Invalid pyproject.toml
|
||||
Caused by: Dynamic metadata is not supported
|
||||
"###);
|
||||
}
|
||||
|
||||
fn script_error(contents: &str) -> String {
|
||||
let err = PyProjectToml::parse(contents)
|
||||
.unwrap()
|
||||
.to_entry_points()
|
||||
.unwrap_err();
|
||||
format_err(err)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_entry_point_group() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
[project.entry-points."a@b"]
|
||||
foo = "bar"
|
||||
"#
|
||||
});
|
||||
assert_snapshot!(script_error(&contents), @"Entrypoint groups must consist of letters and numbers separated by dots, invalid group: `a@b`");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_entry_point_name() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
[project.scripts]
|
||||
"a@b" = "bar"
|
||||
"#
|
||||
});
|
||||
assert_snapshot!(script_error(&contents), @"Entrypoint names must consist of letters, numbers, dots and dashes; invalid name: `a@b`");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_entry_point_conflict_scripts() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
[project.entry-points.console_scripts]
|
||||
foo = "bar"
|
||||
"#
|
||||
});
|
||||
assert_snapshot!(script_error(&contents), @"Use `project.scripts` instead of `project.entry-points.console_scripts`");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_entry_point_conflict_gui_scripts() {
|
||||
let contents = extend_project(indoc! {r#"
|
||||
[project.entry-points.gui_scripts]
|
||||
foo = "bar"
|
||||
"#
|
||||
});
|
||||
assert_snapshot!(script_error(&contents), @"Use `project.gui-scripts` instead of `project.entry-points.gui_scripts`");
|
||||
}
|
|
@ -78,59 +78,4 @@ pub(crate) fn parse_pep639_glob(glob: &str) -> Result<Pattern, Pep639GlobError>
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use insta::assert_snapshot;
|
||||
|
||||
#[test]
|
||||
fn test_error() {
|
||||
let parse_err = |glob| parse_pep639_glob(glob).unwrap_err().to_string();
|
||||
assert_snapshot!(
|
||||
parse_err(".."),
|
||||
@"The parent directory operator (`..`) at position 0 is not allowed in license file globs"
|
||||
);
|
||||
assert_snapshot!(
|
||||
parse_err("licenses/.."),
|
||||
@"The parent directory operator (`..`) at position 9 is not allowed in license file globs"
|
||||
);
|
||||
assert_snapshot!(
|
||||
parse_err("licenses/LICEN!E.txt"),
|
||||
@"Glob contains invalid character at position 14: `!`"
|
||||
);
|
||||
assert_snapshot!(
|
||||
parse_err("licenses/LICEN[!C]E.txt"),
|
||||
@"Glob contains invalid character in range at position 15: `!`"
|
||||
);
|
||||
assert_snapshot!(
|
||||
parse_err("licenses/LICEN[C?]E.txt"),
|
||||
@"Glob contains invalid character in range at position 16: `?`"
|
||||
);
|
||||
assert_snapshot!(parse_err("******"), @"Pattern syntax error near position 2: wildcards are either regular `*` or recursive `**`");
|
||||
assert_snapshot!(
|
||||
parse_err(r"licenses\eula.txt"),
|
||||
@r"Glob contains invalid character at position 8: `\`"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_valid() {
|
||||
let cases = [
|
||||
"licenses/*.txt",
|
||||
"licenses/**/*.txt",
|
||||
"LICEN[CS]E.txt",
|
||||
"LICEN?E.txt",
|
||||
"[a-z].txt",
|
||||
"[a-z._-].txt",
|
||||
"*/**",
|
||||
"LICENSE..txt",
|
||||
"LICENSE_file-1.txt",
|
||||
// (google translate)
|
||||
"licenses/라이센스*.txt",
|
||||
"licenses/ライセンス*.txt",
|
||||
"licenses/执照*.txt",
|
||||
];
|
||||
for case in cases {
|
||||
parse_pep639_glob(case).unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
54
crates/uv-build-backend/src/pep639_glob/tests.rs
Normal file
54
crates/uv-build-backend/src/pep639_glob/tests.rs
Normal file
|
@ -0,0 +1,54 @@
|
|||
use super::*;
|
||||
use insta::assert_snapshot;
|
||||
|
||||
#[test]
|
||||
fn test_error() {
|
||||
let parse_err = |glob| parse_pep639_glob(glob).unwrap_err().to_string();
|
||||
assert_snapshot!(
|
||||
parse_err(".."),
|
||||
@"The parent directory operator (`..`) at position 0 is not allowed in license file globs"
|
||||
);
|
||||
assert_snapshot!(
|
||||
parse_err("licenses/.."),
|
||||
@"The parent directory operator (`..`) at position 9 is not allowed in license file globs"
|
||||
);
|
||||
assert_snapshot!(
|
||||
parse_err("licenses/LICEN!E.txt"),
|
||||
@"Glob contains invalid character at position 14: `!`"
|
||||
);
|
||||
assert_snapshot!(
|
||||
parse_err("licenses/LICEN[!C]E.txt"),
|
||||
@"Glob contains invalid character in range at position 15: `!`"
|
||||
);
|
||||
assert_snapshot!(
|
||||
parse_err("licenses/LICEN[C?]E.txt"),
|
||||
@"Glob contains invalid character in range at position 16: `?`"
|
||||
);
|
||||
assert_snapshot!(parse_err("******"), @"Pattern syntax error near position 2: wildcards are either regular `*` or recursive `**`");
|
||||
assert_snapshot!(
|
||||
parse_err(r"licenses\eula.txt"),
|
||||
@r"Glob contains invalid character at position 8: `\`"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_valid() {
|
||||
let cases = [
|
||||
"licenses/*.txt",
|
||||
"licenses/**/*.txt",
|
||||
"LICEN[CS]E.txt",
|
||||
"LICEN?E.txt",
|
||||
"[a-z].txt",
|
||||
"[a-z._-].txt",
|
||||
"*/**",
|
||||
"LICENSE..txt",
|
||||
"LICENSE_file-1.txt",
|
||||
// (google translate)
|
||||
"licenses/라이센스*.txt",
|
||||
"licenses/ライセンス*.txt",
|
||||
"licenses/执照*.txt",
|
||||
];
|
||||
for case in cases {
|
||||
parse_pep639_glob(case).unwrap();
|
||||
}
|
||||
}
|
137
crates/uv-build-backend/src/tests.rs
Normal file
137
crates/uv-build-backend/src/tests.rs
Normal file
|
@ -0,0 +1,137 @@
|
|||
use super::*;
|
||||
use insta::{assert_snapshot, with_settings};
|
||||
use std::str::FromStr;
|
||||
use tempfile::TempDir;
|
||||
use uv_normalize::PackageName;
|
||||
use uv_pep440::Version;
|
||||
|
||||
#[test]
|
||||
fn test_wheel() {
|
||||
let filename = WheelFilename {
|
||||
name: PackageName::from_str("foo").unwrap(),
|
||||
version: Version::from_str("1.2.3").unwrap(),
|
||||
build_tag: None,
|
||||
python_tag: vec!["py2".to_string(), "py3".to_string()],
|
||||
abi_tag: vec!["none".to_string()],
|
||||
platform_tag: vec!["any".to_string()],
|
||||
};
|
||||
|
||||
with_settings!({
|
||||
filters => [(uv_version::version(), "[VERSION]")],
|
||||
}, {
|
||||
assert_snapshot!(wheel_info(&filename), @r"
|
||||
Wheel-Version: 1.0
|
||||
Generator: uv [VERSION]
|
||||
Root-Is-Purelib: true
|
||||
Tag: py2-none-any
|
||||
Tag: py3-none-any
|
||||
");
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_record() {
|
||||
let record = vec![RecordEntry {
|
||||
path: "uv_backend/__init__.py".to_string(),
|
||||
hash: "89f869e53a3a0061a52c0233e6442d4d72de80a8a2d3406d9ea0bfd397ed7865".to_string(),
|
||||
size: 37,
|
||||
}];
|
||||
|
||||
let mut writer = Vec::new();
|
||||
write_record(&mut writer, "uv_backend-0.1.0", record).unwrap();
|
||||
assert_snapshot!(String::from_utf8(writer).unwrap(), @r"
|
||||
uv_backend/__init__.py,sha256=89f869e53a3a0061a52c0233e6442d4d72de80a8a2d3406d9ea0bfd397ed7865,37
|
||||
uv_backend-0.1.0/RECORD,,
|
||||
");
|
||||
}
|
||||
|
||||
/// Check that we write deterministic wheels.
|
||||
#[test]
|
||||
fn test_determinism() {
|
||||
let temp1 = TempDir::new().unwrap();
|
||||
let uv_backend = Path::new("../../scripts/packages/uv_backend");
|
||||
build(uv_backend, temp1.path(), None).unwrap();
|
||||
|
||||
// Touch the file to check that we don't serialize the last modified date.
|
||||
fs_err::write(
|
||||
uv_backend.join("src/uv_backend/__init__.py"),
|
||||
"def greet():\n print(\"Hello 👋\")\n",
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let temp2 = TempDir::new().unwrap();
|
||||
build(uv_backend, temp2.path(), None).unwrap();
|
||||
|
||||
let wheel_filename = "uv_backend-0.1.0-py3-none-any.whl";
|
||||
assert_eq!(
|
||||
fs_err::read(temp1.path().join(wheel_filename)).unwrap(),
|
||||
fs_err::read(temp2.path().join(wheel_filename)).unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
/// Snapshot all files from the prepare metadata hook.
|
||||
#[test]
|
||||
fn test_prepare_metadata() {
|
||||
let metadata_dir = TempDir::new().unwrap();
|
||||
let uv_backend = Path::new("../../scripts/packages/uv_backend");
|
||||
metadata(uv_backend, metadata_dir.path()).unwrap();
|
||||
|
||||
let mut files: Vec<_> = WalkDir::new(metadata_dir.path())
|
||||
.into_iter()
|
||||
.map(|entry| {
|
||||
entry
|
||||
.unwrap()
|
||||
.path()
|
||||
.strip_prefix(metadata_dir.path())
|
||||
.unwrap()
|
||||
.portable_display()
|
||||
.to_string()
|
||||
})
|
||||
.filter(|path| !path.is_empty())
|
||||
.collect();
|
||||
files.sort();
|
||||
assert_snapshot!(files.join("\n"), @r"
|
||||
uv_backend-0.1.0.dist-info
|
||||
uv_backend-0.1.0.dist-info/METADATA
|
||||
uv_backend-0.1.0.dist-info/RECORD
|
||||
uv_backend-0.1.0.dist-info/WHEEL
|
||||
");
|
||||
|
||||
let metadata_file = metadata_dir
|
||||
.path()
|
||||
.join("uv_backend-0.1.0.dist-info/METADATA");
|
||||
assert_snapshot!(fs_err::read_to_string(metadata_file).unwrap(), @r###"
|
||||
Metadata-Version: 2.3
|
||||
Name: uv-backend
|
||||
Version: 0.1.0
|
||||
Summary: Add your description here
|
||||
Requires-Python: >=3.12
|
||||
Description-Content-Type: text/markdown
|
||||
|
||||
# uv_backend
|
||||
|
||||
A simple package to be built with the uv build backend.
|
||||
"###);
|
||||
|
||||
let record_file = metadata_dir
|
||||
.path()
|
||||
.join("uv_backend-0.1.0.dist-info/RECORD");
|
||||
assert_snapshot!(fs_err::read_to_string(record_file).unwrap(), @r###"
|
||||
uv_backend-0.1.0.dist-info/WHEEL,sha256=70ce44709b6a53e0d0c5a6755b0290179697020f1f867e794f26154fe4825738,79
|
||||
uv_backend-0.1.0.dist-info/METADATA,sha256=e4a0d390317d7182f65ea978254c71ed283e0a4242150cf1c99a694b113ff68d,224
|
||||
uv_backend-0.1.0.dist-info/RECORD,,
|
||||
"###);
|
||||
|
||||
let wheel_file = metadata_dir.path().join("uv_backend-0.1.0.dist-info/WHEEL");
|
||||
let filters = vec![(uv_version::version(), "[VERSION]")];
|
||||
with_settings!({
|
||||
filters => filters
|
||||
}, {
|
||||
assert_snapshot!(fs_err::read_to_string(wheel_file).unwrap(), @r###"
|
||||
Wheel-Version: 1.0
|
||||
Generator: uv [VERSION]
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
"###);
|
||||
});
|
||||
}
|
|
@ -10,6 +10,9 @@ repository = { workspace = true }
|
|||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
|
@ -9,6 +9,9 @@ repository = { workspace = true }
|
|||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
|
@ -10,6 +10,9 @@ repository = { workspace = true }
|
|||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
|
@ -181,144 +181,4 @@ impl std::fmt::Display for RepositoryUrl {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn user_credential_does_not_affect_cache_key() -> Result<(), url::ParseError> {
|
||||
let mut hasher = CacheKeyHasher::new();
|
||||
CanonicalUrl::parse("https://example.com/pypa/sample-namespace-packages.git@2.0.0")?
|
||||
.cache_key(&mut hasher);
|
||||
let hash_without_creds = hasher.finish();
|
||||
|
||||
let mut hasher = CacheKeyHasher::new();
|
||||
CanonicalUrl::parse(
|
||||
"https://user:foo@example.com/pypa/sample-namespace-packages.git@2.0.0",
|
||||
)?
|
||||
.cache_key(&mut hasher);
|
||||
let hash_with_creds = hasher.finish();
|
||||
assert_eq!(
|
||||
hash_without_creds, hash_with_creds,
|
||||
"URLs with no user credentials should hash the same as URLs with different user credentials",
|
||||
);
|
||||
|
||||
let mut hasher = CacheKeyHasher::new();
|
||||
CanonicalUrl::parse(
|
||||
"https://user:bar@example.com/pypa/sample-namespace-packages.git@2.0.0",
|
||||
)?
|
||||
.cache_key(&mut hasher);
|
||||
let hash_with_creds = hasher.finish();
|
||||
assert_eq!(
|
||||
hash_without_creds, hash_with_creds,
|
||||
"URLs with different user credentials should hash the same",
|
||||
);
|
||||
|
||||
let mut hasher = CacheKeyHasher::new();
|
||||
CanonicalUrl::parse("https://:bar@example.com/pypa/sample-namespace-packages.git@2.0.0")?
|
||||
.cache_key(&mut hasher);
|
||||
let hash_with_creds = hasher.finish();
|
||||
assert_eq!(
|
||||
hash_without_creds, hash_with_creds,
|
||||
"URLs with no username, though with a password, should hash the same as URLs with different user credentials",
|
||||
);
|
||||
|
||||
let mut hasher = CacheKeyHasher::new();
|
||||
CanonicalUrl::parse("https://user:@example.com/pypa/sample-namespace-packages.git@2.0.0")?
|
||||
.cache_key(&mut hasher);
|
||||
let hash_with_creds = hasher.finish();
|
||||
assert_eq!(
|
||||
hash_without_creds, hash_with_creds,
|
||||
"URLs with no password, though with a username, should hash the same as URLs with different user credentials",
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn canonical_url() -> Result<(), url::ParseError> {
|
||||
// Two URLs should be considered equal regardless of the `.git` suffix.
|
||||
assert_eq!(
|
||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
|
||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages")?,
|
||||
);
|
||||
|
||||
// Two URLs should be considered equal regardless of the `.git` suffix.
|
||||
assert_eq!(
|
||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git@2.0.0")?,
|
||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages@2.0.0")?,
|
||||
);
|
||||
|
||||
// Two URLs should be _not_ considered equal if they point to different repositories.
|
||||
assert_ne!(
|
||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
|
||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-packages.git")?,
|
||||
);
|
||||
|
||||
// Two URLs should _not_ be considered equal if they request different subdirectories.
|
||||
assert_ne!(
|
||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git#subdirectory=pkg_resources/pkg_a")?,
|
||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git#subdirectory=pkg_resources/pkg_b")?,
|
||||
);
|
||||
|
||||
// Two URLs should _not_ be considered equal if they request different commit tags.
|
||||
assert_ne!(
|
||||
CanonicalUrl::parse(
|
||||
"git+https://github.com/pypa/sample-namespace-packages.git@v1.0.0"
|
||||
)?,
|
||||
CanonicalUrl::parse(
|
||||
"git+https://github.com/pypa/sample-namespace-packages.git@v2.0.0"
|
||||
)?,
|
||||
);
|
||||
|
||||
// Two URLs that cannot be a base should be considered equal.
|
||||
assert_eq!(
|
||||
CanonicalUrl::parse("git+https:://github.com/pypa/sample-namespace-packages.git")?,
|
||||
CanonicalUrl::parse("git+https:://github.com/pypa/sample-namespace-packages.git")?,
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn repository_url() -> Result<(), url::ParseError> {
|
||||
// Two URLs should be considered equal regardless of the `.git` suffix.
|
||||
assert_eq!(
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages")?,
|
||||
);
|
||||
|
||||
// Two URLs should be considered equal regardless of the `.git` suffix.
|
||||
assert_eq!(
|
||||
RepositoryUrl::parse(
|
||||
"git+https://github.com/pypa/sample-namespace-packages.git@2.0.0"
|
||||
)?,
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages@2.0.0")?,
|
||||
);
|
||||
|
||||
// Two URLs should be _not_ considered equal if they point to different repositories.
|
||||
assert_ne!(
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-packages.git")?,
|
||||
);
|
||||
|
||||
// Two URLs should be considered equal if they map to the same repository, even if they
|
||||
// request different subdirectories.
|
||||
assert_eq!(
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git#subdirectory=pkg_resources/pkg_a")?,
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git#subdirectory=pkg_resources/pkg_b")?,
|
||||
);
|
||||
|
||||
// Two URLs should be considered equal if they map to the same repository, even if they
|
||||
// request different commit tags.
|
||||
assert_eq!(
|
||||
RepositoryUrl::parse(
|
||||
"git+https://github.com/pypa/sample-namespace-packages.git@v1.0.0"
|
||||
)?,
|
||||
RepositoryUrl::parse(
|
||||
"git+https://github.com/pypa/sample-namespace-packages.git@v2.0.0"
|
||||
)?,
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
125
crates/uv-cache-key/src/canonical_url/tests.rs
Normal file
125
crates/uv-cache-key/src/canonical_url/tests.rs
Normal file
|
@ -0,0 +1,125 @@
|
|||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn user_credential_does_not_affect_cache_key() -> Result<(), url::ParseError> {
|
||||
let mut hasher = CacheKeyHasher::new();
|
||||
CanonicalUrl::parse("https://example.com/pypa/sample-namespace-packages.git@2.0.0")?
|
||||
.cache_key(&mut hasher);
|
||||
let hash_without_creds = hasher.finish();
|
||||
|
||||
let mut hasher = CacheKeyHasher::new();
|
||||
CanonicalUrl::parse("https://user:foo@example.com/pypa/sample-namespace-packages.git@2.0.0")?
|
||||
.cache_key(&mut hasher);
|
||||
let hash_with_creds = hasher.finish();
|
||||
assert_eq!(
|
||||
hash_without_creds, hash_with_creds,
|
||||
"URLs with no user credentials should hash the same as URLs with different user credentials",
|
||||
);
|
||||
|
||||
let mut hasher = CacheKeyHasher::new();
|
||||
CanonicalUrl::parse("https://user:bar@example.com/pypa/sample-namespace-packages.git@2.0.0")?
|
||||
.cache_key(&mut hasher);
|
||||
let hash_with_creds = hasher.finish();
|
||||
assert_eq!(
|
||||
hash_without_creds, hash_with_creds,
|
||||
"URLs with different user credentials should hash the same",
|
||||
);
|
||||
|
||||
let mut hasher = CacheKeyHasher::new();
|
||||
CanonicalUrl::parse("https://:bar@example.com/pypa/sample-namespace-packages.git@2.0.0")?
|
||||
.cache_key(&mut hasher);
|
||||
let hash_with_creds = hasher.finish();
|
||||
assert_eq!(
|
||||
hash_without_creds, hash_with_creds,
|
||||
"URLs with no username, though with a password, should hash the same as URLs with different user credentials",
|
||||
);
|
||||
|
||||
let mut hasher = CacheKeyHasher::new();
|
||||
CanonicalUrl::parse("https://user:@example.com/pypa/sample-namespace-packages.git@2.0.0")?
|
||||
.cache_key(&mut hasher);
|
||||
let hash_with_creds = hasher.finish();
|
||||
assert_eq!(
|
||||
hash_without_creds, hash_with_creds,
|
||||
"URLs with no password, though with a username, should hash the same as URLs with different user credentials",
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn canonical_url() -> Result<(), url::ParseError> {
|
||||
// Two URLs should be considered equal regardless of the `.git` suffix.
|
||||
assert_eq!(
|
||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
|
||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages")?,
|
||||
);
|
||||
|
||||
// Two URLs should be considered equal regardless of the `.git` suffix.
|
||||
assert_eq!(
|
||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git@2.0.0")?,
|
||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages@2.0.0")?,
|
||||
);
|
||||
|
||||
// Two URLs should be _not_ considered equal if they point to different repositories.
|
||||
assert_ne!(
|
||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
|
||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-packages.git")?,
|
||||
);
|
||||
|
||||
// Two URLs should _not_ be considered equal if they request different subdirectories.
|
||||
assert_ne!(
|
||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git#subdirectory=pkg_resources/pkg_a")?,
|
||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git#subdirectory=pkg_resources/pkg_b")?,
|
||||
);
|
||||
|
||||
// Two URLs should _not_ be considered equal if they request different commit tags.
|
||||
assert_ne!(
|
||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git@v1.0.0")?,
|
||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git@v2.0.0")?,
|
||||
);
|
||||
|
||||
// Two URLs that cannot be a base should be considered equal.
|
||||
assert_eq!(
|
||||
CanonicalUrl::parse("git+https:://github.com/pypa/sample-namespace-packages.git")?,
|
||||
CanonicalUrl::parse("git+https:://github.com/pypa/sample-namespace-packages.git")?,
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn repository_url() -> Result<(), url::ParseError> {
|
||||
// Two URLs should be considered equal regardless of the `.git` suffix.
|
||||
assert_eq!(
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages")?,
|
||||
);
|
||||
|
||||
// Two URLs should be considered equal regardless of the `.git` suffix.
|
||||
assert_eq!(
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git@2.0.0")?,
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages@2.0.0")?,
|
||||
);
|
||||
|
||||
// Two URLs should be _not_ considered equal if they point to different repositories.
|
||||
assert_ne!(
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-packages.git")?,
|
||||
);
|
||||
|
||||
// Two URLs should be considered equal if they map to the same repository, even if they
|
||||
// request different subdirectories.
|
||||
assert_eq!(
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git#subdirectory=pkg_resources/pkg_a")?,
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git#subdirectory=pkg_resources/pkg_b")?,
|
||||
);
|
||||
|
||||
// Two URLs should be considered equal if they map to the same repository, even if they
|
||||
// request different commit tags.
|
||||
assert_eq!(
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git@v1.0.0")?,
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git@v2.0.0")?,
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -10,6 +10,9 @@ repository = { workspace = true }
|
|||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
|
@ -10,6 +10,9 @@ repository = { workspace = true }
|
|||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
|
@ -77,73 +77,4 @@ pub fn version() -> VersionInfo {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use insta::{assert_json_snapshot, assert_snapshot};
|
||||
|
||||
use super::{CommitInfo, VersionInfo};
|
||||
|
||||
#[test]
|
||||
fn version_formatting() {
|
||||
let version = VersionInfo {
|
||||
version: "0.0.0".to_string(),
|
||||
commit_info: None,
|
||||
};
|
||||
assert_snapshot!(version, @"0.0.0");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn version_formatting_with_commit_info() {
|
||||
let version = VersionInfo {
|
||||
version: "0.0.0".to_string(),
|
||||
commit_info: Some(CommitInfo {
|
||||
short_commit_hash: "53b0f5d92".to_string(),
|
||||
commit_hash: "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7".to_string(),
|
||||
last_tag: Some("v0.0.1".to_string()),
|
||||
commit_date: "2023-10-19".to_string(),
|
||||
commits_since_last_tag: 0,
|
||||
}),
|
||||
};
|
||||
assert_snapshot!(version, @"0.0.0 (53b0f5d92 2023-10-19)");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn version_formatting_with_commits_since_last_tag() {
|
||||
let version = VersionInfo {
|
||||
version: "0.0.0".to_string(),
|
||||
commit_info: Some(CommitInfo {
|
||||
short_commit_hash: "53b0f5d92".to_string(),
|
||||
commit_hash: "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7".to_string(),
|
||||
last_tag: Some("v0.0.1".to_string()),
|
||||
commit_date: "2023-10-19".to_string(),
|
||||
commits_since_last_tag: 24,
|
||||
}),
|
||||
};
|
||||
assert_snapshot!(version, @"0.0.0+24 (53b0f5d92 2023-10-19)");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn version_serializable() {
|
||||
let version = VersionInfo {
|
||||
version: "0.0.0".to_string(),
|
||||
commit_info: Some(CommitInfo {
|
||||
short_commit_hash: "53b0f5d92".to_string(),
|
||||
commit_hash: "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7".to_string(),
|
||||
last_tag: Some("v0.0.1".to_string()),
|
||||
commit_date: "2023-10-19".to_string(),
|
||||
commits_since_last_tag: 0,
|
||||
}),
|
||||
};
|
||||
assert_json_snapshot!(version, @r###"
|
||||
{
|
||||
"version": "0.0.0",
|
||||
"commit_info": {
|
||||
"short_commit_hash": "53b0f5d92",
|
||||
"commit_hash": "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7",
|
||||
"commit_date": "2023-10-19",
|
||||
"last_tag": "v0.0.1",
|
||||
"commits_since_last_tag": 0
|
||||
}
|
||||
}
|
||||
"###);
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
68
crates/uv-cli/src/version/tests.rs
Normal file
68
crates/uv-cli/src/version/tests.rs
Normal file
|
@ -0,0 +1,68 @@
|
|||
use insta::{assert_json_snapshot, assert_snapshot};
|
||||
|
||||
use super::{CommitInfo, VersionInfo};
|
||||
|
||||
#[test]
|
||||
fn version_formatting() {
|
||||
let version = VersionInfo {
|
||||
version: "0.0.0".to_string(),
|
||||
commit_info: None,
|
||||
};
|
||||
assert_snapshot!(version, @"0.0.0");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn version_formatting_with_commit_info() {
|
||||
let version = VersionInfo {
|
||||
version: "0.0.0".to_string(),
|
||||
commit_info: Some(CommitInfo {
|
||||
short_commit_hash: "53b0f5d92".to_string(),
|
||||
commit_hash: "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7".to_string(),
|
||||
last_tag: Some("v0.0.1".to_string()),
|
||||
commit_date: "2023-10-19".to_string(),
|
||||
commits_since_last_tag: 0,
|
||||
}),
|
||||
};
|
||||
assert_snapshot!(version, @"0.0.0 (53b0f5d92 2023-10-19)");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn version_formatting_with_commits_since_last_tag() {
|
||||
let version = VersionInfo {
|
||||
version: "0.0.0".to_string(),
|
||||
commit_info: Some(CommitInfo {
|
||||
short_commit_hash: "53b0f5d92".to_string(),
|
||||
commit_hash: "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7".to_string(),
|
||||
last_tag: Some("v0.0.1".to_string()),
|
||||
commit_date: "2023-10-19".to_string(),
|
||||
commits_since_last_tag: 24,
|
||||
}),
|
||||
};
|
||||
assert_snapshot!(version, @"0.0.0+24 (53b0f5d92 2023-10-19)");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn version_serializable() {
|
||||
let version = VersionInfo {
|
||||
version: "0.0.0".to_string(),
|
||||
commit_info: Some(CommitInfo {
|
||||
short_commit_hash: "53b0f5d92".to_string(),
|
||||
commit_hash: "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7".to_string(),
|
||||
last_tag: Some("v0.0.1".to_string()),
|
||||
commit_date: "2023-10-19".to_string(),
|
||||
commits_since_last_tag: 0,
|
||||
}),
|
||||
};
|
||||
assert_json_snapshot!(version, @r#"
|
||||
{
|
||||
"version": "0.0.0",
|
||||
"commit_info": {
|
||||
"short_commit_hash": "53b0f5d92",
|
||||
"commit_hash": "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7",
|
||||
"commit_date": "2023-10-19",
|
||||
"last_tag": "v0.0.1",
|
||||
"commits_since_last_tag": 0
|
||||
}
|
||||
}
|
||||
"#);
|
||||
}
|
|
@ -3,6 +3,9 @@ name = "uv-client"
|
|||
version = "0.0.1"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
File diff suppressed because it is too large
Load diff
995
crates/uv-client/src/html/tests.rs
Normal file
995
crates/uv-client/src/html/tests.rs
Normal file
|
@ -0,0 +1,995 @@
|
|||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn parse_sha256() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<h1>Links for jinja2</h1>
|
||||
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl#sha256=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61">Jinja2-3.1.2-py3-none-any.whl</a><br/>
|
||||
</body>
|
||||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r###"
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
Url {
|
||||
scheme: "https",
|
||||
cannot_be_a_base: false,
|
||||
username: "",
|
||||
password: None,
|
||||
host: Some(
|
||||
Domain(
|
||||
"download.pytorch.org",
|
||||
),
|
||||
),
|
||||
port: None,
|
||||
path: "/whl/jinja2/",
|
||||
query: None,
|
||||
fragment: None,
|
||||
},
|
||||
),
|
||||
files: [
|
||||
File {
|
||||
core_metadata: None,
|
||||
dist_info_metadata: None,
|
||||
data_dist_info_metadata: None,
|
||||
filename: "Jinja2-3.1.2-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
md5: None,
|
||||
sha256: Some(
|
||||
"6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61",
|
||||
),
|
||||
sha384: None,
|
||||
sha512: None,
|
||||
},
|
||||
requires_python: None,
|
||||
size: None,
|
||||
upload_time: None,
|
||||
url: "/whl/Jinja2-3.1.2-py3-none-any.whl#sha256=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61",
|
||||
yanked: None,
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_md5() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<h1>Links for jinja2</h1>
|
||||
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl#md5=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61">Jinja2-3.1.2-py3-none-any.whl</a><br/>
|
||||
</body>
|
||||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r###"
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
Url {
|
||||
scheme: "https",
|
||||
cannot_be_a_base: false,
|
||||
username: "",
|
||||
password: None,
|
||||
host: Some(
|
||||
Domain(
|
||||
"download.pytorch.org",
|
||||
),
|
||||
),
|
||||
port: None,
|
||||
path: "/whl/jinja2/",
|
||||
query: None,
|
||||
fragment: None,
|
||||
},
|
||||
),
|
||||
files: [
|
||||
File {
|
||||
core_metadata: None,
|
||||
dist_info_metadata: None,
|
||||
data_dist_info_metadata: None,
|
||||
filename: "Jinja2-3.1.2-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
md5: Some(
|
||||
"6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61",
|
||||
),
|
||||
sha256: None,
|
||||
sha384: None,
|
||||
sha512: None,
|
||||
},
|
||||
requires_python: None,
|
||||
size: None,
|
||||
upload_time: None,
|
||||
url: "/whl/Jinja2-3.1.2-py3-none-any.whl#md5=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61",
|
||||
yanked: None,
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_base() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<base href="https://index.python.org/">
|
||||
</head>
|
||||
<body>
|
||||
<h1>Links for jinja2</h1>
|
||||
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl#sha256=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61">Jinja2-3.1.2-py3-none-any.whl</a><br/>
|
||||
</body>
|
||||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r###"
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
Url {
|
||||
scheme: "https",
|
||||
cannot_be_a_base: false,
|
||||
username: "",
|
||||
password: None,
|
||||
host: Some(
|
||||
Domain(
|
||||
"index.python.org",
|
||||
),
|
||||
),
|
||||
port: None,
|
||||
path: "/",
|
||||
query: None,
|
||||
fragment: None,
|
||||
},
|
||||
),
|
||||
files: [
|
||||
File {
|
||||
core_metadata: None,
|
||||
dist_info_metadata: None,
|
||||
data_dist_info_metadata: None,
|
||||
filename: "Jinja2-3.1.2-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
md5: None,
|
||||
sha256: Some(
|
||||
"6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61",
|
||||
),
|
||||
sha384: None,
|
||||
sha512: None,
|
||||
},
|
||||
requires_python: None,
|
||||
size: None,
|
||||
upload_time: None,
|
||||
url: "/whl/Jinja2-3.1.2-py3-none-any.whl#sha256=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61",
|
||||
yanked: None,
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_escaped_fragment() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<h1>Links for jinja2</h1>
|
||||
<a href="/whl/Jinja2-3.1.2+233fca715f49-py3-none-any.whl#sha256=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61">Jinja2-3.1.2+233fca715f49-py3-none-any.whl</a><br/>
|
||||
</body>
|
||||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r###"
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
Url {
|
||||
scheme: "https",
|
||||
cannot_be_a_base: false,
|
||||
username: "",
|
||||
password: None,
|
||||
host: Some(
|
||||
Domain(
|
||||
"download.pytorch.org",
|
||||
),
|
||||
),
|
||||
port: None,
|
||||
path: "/whl/jinja2/",
|
||||
query: None,
|
||||
fragment: None,
|
||||
},
|
||||
),
|
||||
files: [
|
||||
File {
|
||||
core_metadata: None,
|
||||
dist_info_metadata: None,
|
||||
data_dist_info_metadata: None,
|
||||
filename: "Jinja2-3.1.2+233fca715f49-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
md5: None,
|
||||
sha256: Some(
|
||||
"6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61",
|
||||
),
|
||||
sha384: None,
|
||||
sha512: None,
|
||||
},
|
||||
requires_python: None,
|
||||
size: None,
|
||||
upload_time: None,
|
||||
url: "/whl/Jinja2-3.1.2+233fca715f49-py3-none-any.whl#sha256=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61",
|
||||
yanked: None,
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_encoded_fragment() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<h1>Links for jinja2</h1>
|
||||
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl#sha256%3D4095ada29e51070f7d199a0a5bdf5c8d8e238e03f0bf4dcc02571e78c9ae800d">Jinja2-3.1.2-py3-none-any.whl</a><br/>
|
||||
</body>
|
||||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r###"
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
Url {
|
||||
scheme: "https",
|
||||
cannot_be_a_base: false,
|
||||
username: "",
|
||||
password: None,
|
||||
host: Some(
|
||||
Domain(
|
||||
"download.pytorch.org",
|
||||
),
|
||||
),
|
||||
port: None,
|
||||
path: "/whl/jinja2/",
|
||||
query: None,
|
||||
fragment: None,
|
||||
},
|
||||
),
|
||||
files: [
|
||||
File {
|
||||
core_metadata: None,
|
||||
dist_info_metadata: None,
|
||||
data_dist_info_metadata: None,
|
||||
filename: "Jinja2-3.1.2-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
md5: None,
|
||||
sha256: Some(
|
||||
"4095ada29e51070f7d199a0a5bdf5c8d8e238e03f0bf4dcc02571e78c9ae800d",
|
||||
),
|
||||
sha384: None,
|
||||
sha512: None,
|
||||
},
|
||||
requires_python: None,
|
||||
size: None,
|
||||
upload_time: None,
|
||||
url: "/whl/Jinja2-3.1.2-py3-none-any.whl#sha256%3D4095ada29e51070f7d199a0a5bdf5c8d8e238e03f0bf4dcc02571e78c9ae800d",
|
||||
yanked: None,
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_quoted_filepath() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<h1>Links for jinja2</h1>
|
||||
<a href="cpu/torchtext-0.17.0%2Bcpu-cp39-cp39-win_amd64.whl">cpu/torchtext-0.17.0%2Bcpu-cp39-cp39-win_amd64.whl</a><br/>
|
||||
</body>
|
||||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r###"
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
Url {
|
||||
scheme: "https",
|
||||
cannot_be_a_base: false,
|
||||
username: "",
|
||||
password: None,
|
||||
host: Some(
|
||||
Domain(
|
||||
"download.pytorch.org",
|
||||
),
|
||||
),
|
||||
port: None,
|
||||
path: "/whl/jinja2/",
|
||||
query: None,
|
||||
fragment: None,
|
||||
},
|
||||
),
|
||||
files: [
|
||||
File {
|
||||
core_metadata: None,
|
||||
dist_info_metadata: None,
|
||||
data_dist_info_metadata: None,
|
||||
filename: "torchtext-0.17.0+cpu-cp39-cp39-win_amd64.whl",
|
||||
hashes: Hashes {
|
||||
md5: None,
|
||||
sha256: None,
|
||||
sha384: None,
|
||||
sha512: None,
|
||||
},
|
||||
requires_python: None,
|
||||
size: None,
|
||||
upload_time: None,
|
||||
url: "cpu/torchtext-0.17.0%2Bcpu-cp39-cp39-win_amd64.whl",
|
||||
yanked: None,
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_missing_hash() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<h1>Links for jinja2</h1>
|
||||
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl">Jinja2-3.1.2-py3-none-any.whl</a><br/>
|
||||
</body>
|
||||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r###"
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
Url {
|
||||
scheme: "https",
|
||||
cannot_be_a_base: false,
|
||||
username: "",
|
||||
password: None,
|
||||
host: Some(
|
||||
Domain(
|
||||
"download.pytorch.org",
|
||||
),
|
||||
),
|
||||
port: None,
|
||||
path: "/whl/jinja2/",
|
||||
query: None,
|
||||
fragment: None,
|
||||
},
|
||||
),
|
||||
files: [
|
||||
File {
|
||||
core_metadata: None,
|
||||
dist_info_metadata: None,
|
||||
data_dist_info_metadata: None,
|
||||
filename: "Jinja2-3.1.2-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
md5: None,
|
||||
sha256: None,
|
||||
sha384: None,
|
||||
sha512: None,
|
||||
},
|
||||
requires_python: None,
|
||||
size: None,
|
||||
upload_time: None,
|
||||
url: "/whl/Jinja2-3.1.2-py3-none-any.whl",
|
||||
yanked: None,
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_missing_href() {
|
||||
let text = r"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<h1>Links for jinja2</h1>
|
||||
<a>Jinja2-3.1.2-py3-none-any.whl</a><br/>
|
||||
</body>
|
||||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
";
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap_err();
|
||||
insta::assert_snapshot!(result, @"Missing href attribute on anchor link");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_empty_href() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<h1>Links for jinja2</h1>
|
||||
<a href="">Jinja2-3.1.2-py3-none-any.whl</a><br/>
|
||||
</body>
|
||||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap_err();
|
||||
insta::assert_snapshot!(result, @"Missing href attribute on anchor link");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_empty_fragment() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<h1>Links for jinja2</h1>
|
||||
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl#">Jinja2-3.1.2-py3-none-any.whl</a><br/>
|
||||
</body>
|
||||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r###"
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
Url {
|
||||
scheme: "https",
|
||||
cannot_be_a_base: false,
|
||||
username: "",
|
||||
password: None,
|
||||
host: Some(
|
||||
Domain(
|
||||
"download.pytorch.org",
|
||||
),
|
||||
),
|
||||
port: None,
|
||||
path: "/whl/jinja2/",
|
||||
query: None,
|
||||
fragment: None,
|
||||
},
|
||||
),
|
||||
files: [
|
||||
File {
|
||||
core_metadata: None,
|
||||
dist_info_metadata: None,
|
||||
data_dist_info_metadata: None,
|
||||
filename: "Jinja2-3.1.2-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
md5: None,
|
||||
sha256: None,
|
||||
sha384: None,
|
||||
sha512: None,
|
||||
},
|
||||
requires_python: None,
|
||||
size: None,
|
||||
upload_time: None,
|
||||
url: "/whl/Jinja2-3.1.2-py3-none-any.whl#",
|
||||
yanked: None,
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_query_string() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<h1>Links for jinja2</h1>
|
||||
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl?project=legacy">Jinja2-3.1.2-py3-none-any.whl</a><br/>
|
||||
</body>
|
||||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r###"
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
Url {
|
||||
scheme: "https",
|
||||
cannot_be_a_base: false,
|
||||
username: "",
|
||||
password: None,
|
||||
host: Some(
|
||||
Domain(
|
||||
"download.pytorch.org",
|
||||
),
|
||||
),
|
||||
port: None,
|
||||
path: "/whl/jinja2/",
|
||||
query: None,
|
||||
fragment: None,
|
||||
},
|
||||
),
|
||||
files: [
|
||||
File {
|
||||
core_metadata: None,
|
||||
dist_info_metadata: None,
|
||||
data_dist_info_metadata: None,
|
||||
filename: "Jinja2-3.1.2-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
md5: None,
|
||||
sha256: None,
|
||||
sha384: None,
|
||||
sha512: None,
|
||||
},
|
||||
requires_python: None,
|
||||
size: None,
|
||||
upload_time: None,
|
||||
url: "/whl/Jinja2-3.1.2-py3-none-any.whl?project=legacy",
|
||||
yanked: None,
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_missing_hash_value() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<h1>Links for jinja2</h1>
|
||||
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl#sha256">Jinja2-3.1.2-py3-none-any.whl</a><br/>
|
||||
</body>
|
||||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap_err();
|
||||
insta::assert_snapshot!(result, @"Unexpected fragment (expected `#sha256=...` or similar) on URL: sha256");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_unknown_hash() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<h1>Links for jinja2</h1>
|
||||
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl#blake2=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61">Jinja2-3.1.2-py3-none-any.whl</a><br/>
|
||||
</body>
|
||||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap_err();
|
||||
insta::assert_snapshot!(result, @"Unsupported hash algorithm (expected one of: `md5`, `sha256`, `sha384`, or `sha512`) on: `blake2=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61`");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_flat_index_html() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head><meta http-equiv="Content-Type" content="text/html; charset=utf-8"></head>
|
||||
<body>
|
||||
<a href="https://storage.googleapis.com/jax-releases/cuda100/jaxlib-0.1.52+cuda100-cp36-none-manylinux2010_x86_64.whl">cuda100/jaxlib-0.1.52+cuda100-cp36-none-manylinux2010_x86_64.whl</a><br>
|
||||
<a href="https://storage.googleapis.com/jax-releases/cuda100/jaxlib-0.1.52+cuda100-cp37-none-manylinux2010_x86_64.whl">cuda100/jaxlib-0.1.52+cuda100-cp37-none-manylinux2010_x86_64.whl</a><br>
|
||||
</body>
|
||||
</html>
|
||||
"#;
|
||||
let base =
|
||||
Url::parse("https://storage.googleapis.com/jax-releases/jax_cuda_releases.html").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r###"
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
Url {
|
||||
scheme: "https",
|
||||
cannot_be_a_base: false,
|
||||
username: "",
|
||||
password: None,
|
||||
host: Some(
|
||||
Domain(
|
||||
"storage.googleapis.com",
|
||||
),
|
||||
),
|
||||
port: None,
|
||||
path: "/jax-releases/jax_cuda_releases.html",
|
||||
query: None,
|
||||
fragment: None,
|
||||
},
|
||||
),
|
||||
files: [
|
||||
File {
|
||||
core_metadata: None,
|
||||
dist_info_metadata: None,
|
||||
data_dist_info_metadata: None,
|
||||
filename: "jaxlib-0.1.52+cuda100-cp36-none-manylinux2010_x86_64.whl",
|
||||
hashes: Hashes {
|
||||
md5: None,
|
||||
sha256: None,
|
||||
sha384: None,
|
||||
sha512: None,
|
||||
},
|
||||
requires_python: None,
|
||||
size: None,
|
||||
upload_time: None,
|
||||
url: "https://storage.googleapis.com/jax-releases/cuda100/jaxlib-0.1.52+cuda100-cp36-none-manylinux2010_x86_64.whl",
|
||||
yanked: None,
|
||||
},
|
||||
File {
|
||||
core_metadata: None,
|
||||
dist_info_metadata: None,
|
||||
data_dist_info_metadata: None,
|
||||
filename: "jaxlib-0.1.52+cuda100-cp37-none-manylinux2010_x86_64.whl",
|
||||
hashes: Hashes {
|
||||
md5: None,
|
||||
sha256: None,
|
||||
sha384: None,
|
||||
sha512: None,
|
||||
},
|
||||
requires_python: None,
|
||||
size: None,
|
||||
upload_time: None,
|
||||
url: "https://storage.googleapis.com/jax-releases/cuda100/jaxlib-0.1.52+cuda100-cp37-none-manylinux2010_x86_64.whl",
|
||||
yanked: None,
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
/// Test for AWS Code Artifact
|
||||
///
|
||||
/// See: <https://github.com/astral-sh/uv/issues/1388#issuecomment-1947659088>
|
||||
#[test]
|
||||
fn parse_code_artifact_index_html() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Links for flask</title>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Links for flask</h1>
|
||||
<a href="0.1/Flask-0.1.tar.gz#sha256=9da884457e910bf0847d396cb4b778ad9f3c3d17db1c5997cb861937bd284237" data-gpg-sig="false" >Flask-0.1.tar.gz</a>
|
||||
<br/>
|
||||
<a href="0.10.1/Flask-0.10.1.tar.gz#sha256=4c83829ff83d408b5e1d4995472265411d2c414112298f2eb4b359d9e4563373" data-gpg-sig="false" >Flask-0.10.1.tar.gz</a>
|
||||
<br/>
|
||||
<a href="3.0.1/flask-3.0.1.tar.gz#sha256=6489f51bb3666def6f314e15f19d50a1869a19ae0e8c9a3641ffe66c77d42403" data-requires-python=">=3.8" data-gpg-sig="false" >flask-3.0.1.tar.gz</a>
|
||||
<br/>
|
||||
</body>
|
||||
</html>
|
||||
"#;
|
||||
let base = Url::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask/")
|
||||
.unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r###"
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
Url {
|
||||
scheme: "https",
|
||||
cannot_be_a_base: false,
|
||||
username: "",
|
||||
password: None,
|
||||
host: Some(
|
||||
Domain(
|
||||
"account.d.codeartifact.us-west-2.amazonaws.com",
|
||||
),
|
||||
),
|
||||
port: None,
|
||||
path: "/pypi/shared-packages-pypi/simple/flask/",
|
||||
query: None,
|
||||
fragment: None,
|
||||
},
|
||||
),
|
||||
files: [
|
||||
File {
|
||||
core_metadata: None,
|
||||
dist_info_metadata: None,
|
||||
data_dist_info_metadata: None,
|
||||
filename: "Flask-0.1.tar.gz",
|
||||
hashes: Hashes {
|
||||
md5: None,
|
||||
sha256: Some(
|
||||
"9da884457e910bf0847d396cb4b778ad9f3c3d17db1c5997cb861937bd284237",
|
||||
),
|
||||
sha384: None,
|
||||
sha512: None,
|
||||
},
|
||||
requires_python: None,
|
||||
size: None,
|
||||
upload_time: None,
|
||||
url: "0.1/Flask-0.1.tar.gz#sha256=9da884457e910bf0847d396cb4b778ad9f3c3d17db1c5997cb861937bd284237",
|
||||
yanked: None,
|
||||
},
|
||||
File {
|
||||
core_metadata: None,
|
||||
dist_info_metadata: None,
|
||||
data_dist_info_metadata: None,
|
||||
filename: "Flask-0.10.1.tar.gz",
|
||||
hashes: Hashes {
|
||||
md5: None,
|
||||
sha256: Some(
|
||||
"4c83829ff83d408b5e1d4995472265411d2c414112298f2eb4b359d9e4563373",
|
||||
),
|
||||
sha384: None,
|
||||
sha512: None,
|
||||
},
|
||||
requires_python: None,
|
||||
size: None,
|
||||
upload_time: None,
|
||||
url: "0.10.1/Flask-0.10.1.tar.gz#sha256=4c83829ff83d408b5e1d4995472265411d2c414112298f2eb4b359d9e4563373",
|
||||
yanked: None,
|
||||
},
|
||||
File {
|
||||
core_metadata: None,
|
||||
dist_info_metadata: None,
|
||||
data_dist_info_metadata: None,
|
||||
filename: "flask-3.0.1.tar.gz",
|
||||
hashes: Hashes {
|
||||
md5: None,
|
||||
sha256: Some(
|
||||
"6489f51bb3666def6f314e15f19d50a1869a19ae0e8c9a3641ffe66c77d42403",
|
||||
),
|
||||
sha384: None,
|
||||
sha512: None,
|
||||
},
|
||||
requires_python: Some(
|
||||
Ok(
|
||||
VersionSpecifiers(
|
||||
[
|
||||
VersionSpecifier {
|
||||
operator: GreaterThanEqual,
|
||||
version: "3.8",
|
||||
},
|
||||
],
|
||||
),
|
||||
),
|
||||
),
|
||||
size: None,
|
||||
upload_time: None,
|
||||
url: "3.0.1/flask-3.0.1.tar.gz#sha256=6489f51bb3666def6f314e15f19d50a1869a19ae0e8c9a3641ffe66c77d42403",
|
||||
yanked: None,
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_file_requires_python_trailing_comma() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<h1>Links for jinja2</h1>
|
||||
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl#sha256=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61" data-requires-python=">=3.8,">Jinja2-3.1.2-py3-none-any.whl</a><br/>
|
||||
</body>
|
||||
</html>
|
||||
"#;
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r###"
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
Url {
|
||||
scheme: "https",
|
||||
cannot_be_a_base: false,
|
||||
username: "",
|
||||
password: None,
|
||||
host: Some(
|
||||
Domain(
|
||||
"download.pytorch.org",
|
||||
),
|
||||
),
|
||||
port: None,
|
||||
path: "/whl/jinja2/",
|
||||
query: None,
|
||||
fragment: None,
|
||||
},
|
||||
),
|
||||
files: [
|
||||
File {
|
||||
core_metadata: None,
|
||||
dist_info_metadata: None,
|
||||
data_dist_info_metadata: None,
|
||||
filename: "Jinja2-3.1.2-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
md5: None,
|
||||
sha256: Some(
|
||||
"6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61",
|
||||
),
|
||||
sha384: None,
|
||||
sha512: None,
|
||||
},
|
||||
requires_python: Some(
|
||||
Ok(
|
||||
VersionSpecifiers(
|
||||
[
|
||||
VersionSpecifier {
|
||||
operator: GreaterThanEqual,
|
||||
version: "3.8",
|
||||
},
|
||||
],
|
||||
),
|
||||
),
|
||||
),
|
||||
size: None,
|
||||
upload_time: None,
|
||||
url: "/whl/Jinja2-3.1.2-py3-none-any.whl#sha256=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61",
|
||||
yanked: None,
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
/// Respect PEP 714 (see: <https://peps.python.org/pep-0714/>).
|
||||
#[test]
|
||||
fn parse_core_metadata() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<h1>Links for jinja2</h1>
|
||||
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl" data-dist-info-metadata="true">Jinja2-3.1.2-py3-none-any.whl</a><br/>
|
||||
<a href="/whl/Jinja2-3.1.3-py3-none-any.whl" data-core-metadata="true">Jinja2-3.1.3-py3-none-any.whl</a><br/>
|
||||
<a href="/whl/Jinja2-3.1.4-py3-none-any.whl" data-dist-info-metadata="false">Jinja2-3.1.4-py3-none-any.whl</a><br/>
|
||||
<a href="/whl/Jinja2-3.1.5-py3-none-any.whl" data-core-metadata="false">Jinja2-3.1.5-py3-none-any.whl</a><br/>
|
||||
<a href="/whl/Jinja2-3.1.6-py3-none-any.whl" data-core-metadata="true" data-dist-info-metadata="false">Jinja2-3.1.6-py3-none-any.whl</a><br/>
|
||||
</body>
|
||||
</html>
|
||||
"#;
|
||||
let base = Url::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask/")
|
||||
.unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r###"
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
Url {
|
||||
scheme: "https",
|
||||
cannot_be_a_base: false,
|
||||
username: "",
|
||||
password: None,
|
||||
host: Some(
|
||||
Domain(
|
||||
"account.d.codeartifact.us-west-2.amazonaws.com",
|
||||
),
|
||||
),
|
||||
port: None,
|
||||
path: "/pypi/shared-packages-pypi/simple/flask/",
|
||||
query: None,
|
||||
fragment: None,
|
||||
},
|
||||
),
|
||||
files: [
|
||||
File {
|
||||
core_metadata: Some(
|
||||
Bool(
|
||||
true,
|
||||
),
|
||||
),
|
||||
dist_info_metadata: None,
|
||||
data_dist_info_metadata: None,
|
||||
filename: "Jinja2-3.1.2-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
md5: None,
|
||||
sha256: None,
|
||||
sha384: None,
|
||||
sha512: None,
|
||||
},
|
||||
requires_python: None,
|
||||
size: None,
|
||||
upload_time: None,
|
||||
url: "/whl/Jinja2-3.1.2-py3-none-any.whl",
|
||||
yanked: None,
|
||||
},
|
||||
File {
|
||||
core_metadata: Some(
|
||||
Bool(
|
||||
true,
|
||||
),
|
||||
),
|
||||
dist_info_metadata: None,
|
||||
data_dist_info_metadata: None,
|
||||
filename: "Jinja2-3.1.3-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
md5: None,
|
||||
sha256: None,
|
||||
sha384: None,
|
||||
sha512: None,
|
||||
},
|
||||
requires_python: None,
|
||||
size: None,
|
||||
upload_time: None,
|
||||
url: "/whl/Jinja2-3.1.3-py3-none-any.whl",
|
||||
yanked: None,
|
||||
},
|
||||
File {
|
||||
core_metadata: Some(
|
||||
Bool(
|
||||
false,
|
||||
),
|
||||
),
|
||||
dist_info_metadata: None,
|
||||
data_dist_info_metadata: None,
|
||||
filename: "Jinja2-3.1.4-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
md5: None,
|
||||
sha256: None,
|
||||
sha384: None,
|
||||
sha512: None,
|
||||
},
|
||||
requires_python: None,
|
||||
size: None,
|
||||
upload_time: None,
|
||||
url: "/whl/Jinja2-3.1.4-py3-none-any.whl",
|
||||
yanked: None,
|
||||
},
|
||||
File {
|
||||
core_metadata: Some(
|
||||
Bool(
|
||||
false,
|
||||
),
|
||||
),
|
||||
dist_info_metadata: None,
|
||||
data_dist_info_metadata: None,
|
||||
filename: "Jinja2-3.1.5-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
md5: None,
|
||||
sha256: None,
|
||||
sha384: None,
|
||||
sha512: None,
|
||||
},
|
||||
requires_python: None,
|
||||
size: None,
|
||||
upload_time: None,
|
||||
url: "/whl/Jinja2-3.1.5-py3-none-any.whl",
|
||||
yanked: None,
|
||||
},
|
||||
File {
|
||||
core_metadata: Some(
|
||||
Bool(
|
||||
true,
|
||||
),
|
||||
),
|
||||
dist_info_metadata: None,
|
||||
data_dist_info_metadata: None,
|
||||
filename: "Jinja2-3.1.6-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
md5: None,
|
||||
sha256: None,
|
||||
sha384: None,
|
||||
sha512: None,
|
||||
},
|
||||
requires_python: None,
|
||||
size: None,
|
||||
upload_time: None,
|
||||
url: "/whl/Jinja2-3.1.6-py3-none-any.whl",
|
||||
yanked: None,
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
|
@ -453,326 +453,4 @@ impl CacheControlDirective {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn cache_control_token() {
|
||||
let cc: CacheControl = CacheControlParser::new(["no-cache"]).collect();
|
||||
assert!(cc.no_cache);
|
||||
assert!(!cc.must_revalidate);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_max_age() {
|
||||
let cc: CacheControl = CacheControlParser::new(["max-age=60"]).collect();
|
||||
assert_eq!(Some(60), cc.max_age_seconds);
|
||||
assert!(!cc.must_revalidate);
|
||||
}
|
||||
|
||||
// [RFC 9111 S5.2.1.1] says that client MUST NOT quote max-age, but we
|
||||
// support parsing it that way anyway.
|
||||
//
|
||||
// [RFC 9111 S5.2.1.1]: https://www.rfc-editor.org/rfc/rfc9111.html#section-5.2.1.1
|
||||
#[test]
|
||||
fn cache_control_max_age_quoted() {
|
||||
let cc: CacheControl = CacheControlParser::new([r#"max-age="60""#]).collect();
|
||||
assert_eq!(Some(60), cc.max_age_seconds);
|
||||
assert!(!cc.must_revalidate);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_max_age_invalid() {
|
||||
let cc: CacheControl = CacheControlParser::new(["max-age=6a0"]).collect();
|
||||
assert_eq!(None, cc.max_age_seconds);
|
||||
assert!(cc.must_revalidate);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_immutable() {
|
||||
let cc: CacheControl = CacheControlParser::new(["max-age=31536000, immutable"]).collect();
|
||||
assert_eq!(Some(31_536_000), cc.max_age_seconds);
|
||||
assert!(cc.immutable);
|
||||
assert!(!cc.must_revalidate);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_unrecognized() {
|
||||
let cc: CacheControl = CacheControlParser::new(["lion,max-age=60,zebra"]).collect();
|
||||
assert_eq!(Some(60), cc.max_age_seconds);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_invalid_squashes_remainder() {
|
||||
let cc: CacheControl = CacheControlParser::new(["no-cache,\x00,max-age=60"]).collect();
|
||||
// The invalid data doesn't impact things before it.
|
||||
assert!(cc.no_cache);
|
||||
// The invalid data precludes parsing anything after.
|
||||
assert_eq!(None, cc.max_age_seconds);
|
||||
// The invalid contents should force revalidation.
|
||||
assert!(cc.must_revalidate);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_invalid_squashes_remainder_but_not_other_header_values() {
|
||||
let cc: CacheControl =
|
||||
CacheControlParser::new(["no-cache,\x00,max-age=60", "max-stale=30"]).collect();
|
||||
// The invalid data doesn't impact things before it.
|
||||
assert!(cc.no_cache);
|
||||
// The invalid data precludes parsing anything after
|
||||
// in the same header value, but not in other
|
||||
// header values.
|
||||
assert_eq!(Some(30), cc.max_stale_seconds);
|
||||
// The invalid contents should force revalidation.
|
||||
assert!(cc.must_revalidate);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_parse_token() {
|
||||
let directives = CacheControlParser::new(["no-cache"]).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![CacheControlDirective {
|
||||
name: "no-cache".to_string(),
|
||||
value: vec![]
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_parse_token_to_token_value() {
|
||||
let directives = CacheControlParser::new(["max-age=60"]).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![CacheControlDirective {
|
||||
name: "max-age".to_string(),
|
||||
value: b"60".to_vec(),
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_parse_token_to_quoted_string() {
|
||||
let directives =
|
||||
CacheControlParser::new([r#"private="cookie,x-something-else""#]).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![CacheControlDirective {
|
||||
name: "private".to_string(),
|
||||
value: b"cookie,x-something-else".to_vec(),
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_parse_token_to_quoted_string_with_escape() {
|
||||
let directives =
|
||||
CacheControlParser::new([r#"private="something\"crazy""#]).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![CacheControlDirective {
|
||||
name: "private".to_string(),
|
||||
value: br#"something"crazy"#.to_vec(),
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_parse_multiple_directives() {
|
||||
let header = r#"max-age=60, no-cache, private="cookie", no-transform"#;
|
||||
let directives = CacheControlParser::new([header]).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![
|
||||
CacheControlDirective {
|
||||
name: "max-age".to_string(),
|
||||
value: b"60".to_vec(),
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "no-cache".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "private".to_string(),
|
||||
value: b"cookie".to_vec(),
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "no-transform".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_parse_multiple_directives_across_multiple_header_values() {
|
||||
let headers = [
|
||||
r"max-age=60, no-cache",
|
||||
r#"private="cookie""#,
|
||||
r"no-transform",
|
||||
];
|
||||
let directives = CacheControlParser::new(headers).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![
|
||||
CacheControlDirective {
|
||||
name: "max-age".to_string(),
|
||||
value: b"60".to_vec(),
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "no-cache".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "private".to_string(),
|
||||
value: b"cookie".to_vec(),
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "no-transform".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_parse_one_header_invalid() {
|
||||
let headers = [
|
||||
r"max-age=60, no-cache",
|
||||
r#", private="cookie""#,
|
||||
r"no-transform",
|
||||
];
|
||||
let directives = CacheControlParser::new(headers).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![
|
||||
CacheControlDirective {
|
||||
name: "max-age".to_string(),
|
||||
value: b"60".to_vec(),
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "no-cache".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "must-revalidate".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "no-transform".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_parse_invalid_directive_drops_remainder() {
|
||||
let header = r#"max-age=60, no-cache, ="cookie", no-transform"#;
|
||||
let directives = CacheControlParser::new([header]).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![
|
||||
CacheControlDirective {
|
||||
name: "max-age".to_string(),
|
||||
value: b"60".to_vec(),
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "no-cache".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "must-revalidate".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_parse_name_normalized() {
|
||||
let header = r"MAX-AGE=60";
|
||||
let directives = CacheControlParser::new([header]).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![CacheControlDirective {
|
||||
name: "max-age".to_string(),
|
||||
value: b"60".to_vec(),
|
||||
},]
|
||||
);
|
||||
}
|
||||
|
||||
// When a duplicate directive is found, we keep the first one
|
||||
// and add in a `must-revalidate` directive to indicate that
|
||||
// things are stale and the client should do a re-check.
|
||||
#[test]
|
||||
fn cache_control_parse_duplicate_directives() {
|
||||
let header = r"max-age=60, no-cache, max-age=30";
|
||||
let directives = CacheControlParser::new([header]).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![
|
||||
CacheControlDirective {
|
||||
name: "max-age".to_string(),
|
||||
value: b"60".to_vec(),
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "no-cache".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "must-revalidate".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_parse_duplicate_directives_across_headers() {
|
||||
let headers = [r"max-age=60, no-cache", r"max-age=30"];
|
||||
let directives = CacheControlParser::new(headers).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![
|
||||
CacheControlDirective {
|
||||
name: "max-age".to_string(),
|
||||
value: b"60".to_vec(),
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "no-cache".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "must-revalidate".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
// Tests that we don't emit must-revalidate multiple times
|
||||
// even when something is duplicated multiple times.
|
||||
#[test]
|
||||
fn cache_control_parse_duplicate_redux() {
|
||||
let header = r"max-age=60, no-cache, no-cache, max-age=30";
|
||||
let directives = CacheControlParser::new([header]).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![
|
||||
CacheControlDirective {
|
||||
name: "max-age".to_string(),
|
||||
value: b"60".to_vec(),
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "no-cache".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "must-revalidate".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
320
crates/uv-client/src/httpcache/control/tests.rs
Normal file
320
crates/uv-client/src/httpcache/control/tests.rs
Normal file
|
@ -0,0 +1,320 @@
|
|||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn cache_control_token() {
|
||||
let cc: CacheControl = CacheControlParser::new(["no-cache"]).collect();
|
||||
assert!(cc.no_cache);
|
||||
assert!(!cc.must_revalidate);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_max_age() {
|
||||
let cc: CacheControl = CacheControlParser::new(["max-age=60"]).collect();
|
||||
assert_eq!(Some(60), cc.max_age_seconds);
|
||||
assert!(!cc.must_revalidate);
|
||||
}
|
||||
|
||||
// [RFC 9111 S5.2.1.1] says that client MUST NOT quote max-age, but we
|
||||
// support parsing it that way anyway.
|
||||
//
|
||||
// [RFC 9111 S5.2.1.1]: https://www.rfc-editor.org/rfc/rfc9111.html#section-5.2.1.1
|
||||
#[test]
|
||||
fn cache_control_max_age_quoted() {
|
||||
let cc: CacheControl = CacheControlParser::new([r#"max-age="60""#]).collect();
|
||||
assert_eq!(Some(60), cc.max_age_seconds);
|
||||
assert!(!cc.must_revalidate);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_max_age_invalid() {
|
||||
let cc: CacheControl = CacheControlParser::new(["max-age=6a0"]).collect();
|
||||
assert_eq!(None, cc.max_age_seconds);
|
||||
assert!(cc.must_revalidate);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_immutable() {
|
||||
let cc: CacheControl = CacheControlParser::new(["max-age=31536000, immutable"]).collect();
|
||||
assert_eq!(Some(31_536_000), cc.max_age_seconds);
|
||||
assert!(cc.immutable);
|
||||
assert!(!cc.must_revalidate);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_unrecognized() {
|
||||
let cc: CacheControl = CacheControlParser::new(["lion,max-age=60,zebra"]).collect();
|
||||
assert_eq!(Some(60), cc.max_age_seconds);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_invalid_squashes_remainder() {
|
||||
let cc: CacheControl = CacheControlParser::new(["no-cache,\x00,max-age=60"]).collect();
|
||||
// The invalid data doesn't impact things before it.
|
||||
assert!(cc.no_cache);
|
||||
// The invalid data precludes parsing anything after.
|
||||
assert_eq!(None, cc.max_age_seconds);
|
||||
// The invalid contents should force revalidation.
|
||||
assert!(cc.must_revalidate);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_invalid_squashes_remainder_but_not_other_header_values() {
|
||||
let cc: CacheControl =
|
||||
CacheControlParser::new(["no-cache,\x00,max-age=60", "max-stale=30"]).collect();
|
||||
// The invalid data doesn't impact things before it.
|
||||
assert!(cc.no_cache);
|
||||
// The invalid data precludes parsing anything after
|
||||
// in the same header value, but not in other
|
||||
// header values.
|
||||
assert_eq!(Some(30), cc.max_stale_seconds);
|
||||
// The invalid contents should force revalidation.
|
||||
assert!(cc.must_revalidate);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_parse_token() {
|
||||
let directives = CacheControlParser::new(["no-cache"]).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![CacheControlDirective {
|
||||
name: "no-cache".to_string(),
|
||||
value: vec![]
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_parse_token_to_token_value() {
|
||||
let directives = CacheControlParser::new(["max-age=60"]).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![CacheControlDirective {
|
||||
name: "max-age".to_string(),
|
||||
value: b"60".to_vec(),
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_parse_token_to_quoted_string() {
|
||||
let directives =
|
||||
CacheControlParser::new([r#"private="cookie,x-something-else""#]).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![CacheControlDirective {
|
||||
name: "private".to_string(),
|
||||
value: b"cookie,x-something-else".to_vec(),
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_parse_token_to_quoted_string_with_escape() {
|
||||
let directives = CacheControlParser::new([r#"private="something\"crazy""#]).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![CacheControlDirective {
|
||||
name: "private".to_string(),
|
||||
value: br#"something"crazy"#.to_vec(),
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_parse_multiple_directives() {
|
||||
let header = r#"max-age=60, no-cache, private="cookie", no-transform"#;
|
||||
let directives = CacheControlParser::new([header]).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![
|
||||
CacheControlDirective {
|
||||
name: "max-age".to_string(),
|
||||
value: b"60".to_vec(),
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "no-cache".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "private".to_string(),
|
||||
value: b"cookie".to_vec(),
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "no-transform".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_parse_multiple_directives_across_multiple_header_values() {
|
||||
let headers = [
|
||||
r"max-age=60, no-cache",
|
||||
r#"private="cookie""#,
|
||||
r"no-transform",
|
||||
];
|
||||
let directives = CacheControlParser::new(headers).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![
|
||||
CacheControlDirective {
|
||||
name: "max-age".to_string(),
|
||||
value: b"60".to_vec(),
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "no-cache".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "private".to_string(),
|
||||
value: b"cookie".to_vec(),
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "no-transform".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_parse_one_header_invalid() {
|
||||
let headers = [
|
||||
r"max-age=60, no-cache",
|
||||
r#", private="cookie""#,
|
||||
r"no-transform",
|
||||
];
|
||||
let directives = CacheControlParser::new(headers).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![
|
||||
CacheControlDirective {
|
||||
name: "max-age".to_string(),
|
||||
value: b"60".to_vec(),
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "no-cache".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "must-revalidate".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "no-transform".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_parse_invalid_directive_drops_remainder() {
|
||||
let header = r#"max-age=60, no-cache, ="cookie", no-transform"#;
|
||||
let directives = CacheControlParser::new([header]).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![
|
||||
CacheControlDirective {
|
||||
name: "max-age".to_string(),
|
||||
value: b"60".to_vec(),
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "no-cache".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "must-revalidate".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_parse_name_normalized() {
|
||||
let header = r"MAX-AGE=60";
|
||||
let directives = CacheControlParser::new([header]).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![CacheControlDirective {
|
||||
name: "max-age".to_string(),
|
||||
value: b"60".to_vec(),
|
||||
},]
|
||||
);
|
||||
}
|
||||
|
||||
// When a duplicate directive is found, we keep the first one
|
||||
// and add in a `must-revalidate` directive to indicate that
|
||||
// things are stale and the client should do a re-check.
|
||||
#[test]
|
||||
fn cache_control_parse_duplicate_directives() {
|
||||
let header = r"max-age=60, no-cache, max-age=30";
|
||||
let directives = CacheControlParser::new([header]).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![
|
||||
CacheControlDirective {
|
||||
name: "max-age".to_string(),
|
||||
value: b"60".to_vec(),
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "no-cache".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "must-revalidate".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_control_parse_duplicate_directives_across_headers() {
|
||||
let headers = [r"max-age=60, no-cache", r"max-age=30"];
|
||||
let directives = CacheControlParser::new(headers).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![
|
||||
CacheControlDirective {
|
||||
name: "max-age".to_string(),
|
||||
value: b"60".to_vec(),
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "no-cache".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "must-revalidate".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
// Tests that we don't emit must-revalidate multiple times
|
||||
// even when something is duplicated multiple times.
|
||||
#[test]
|
||||
fn cache_control_parse_duplicate_redux() {
|
||||
let header = r"max-age=60, no-cache, no-cache, max-age=30";
|
||||
let directives = CacheControlParser::new([header]).collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
directives,
|
||||
vec![
|
||||
CacheControlDirective {
|
||||
name: "max-age".to_string(),
|
||||
value: b"60".to_vec(),
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "no-cache".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
CacheControlDirective {
|
||||
name: "must-revalidate".to_string(),
|
||||
value: vec![]
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
|
@ -901,107 +901,4 @@ impl Connectivity {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::str::FromStr;
|
||||
|
||||
use url::Url;
|
||||
|
||||
use uv_normalize::PackageName;
|
||||
use uv_pypi_types::{JoinRelativeError, SimpleJson};
|
||||
|
||||
use crate::{html::SimpleHtml, SimpleMetadata, SimpleMetadatum};
|
||||
|
||||
#[test]
|
||||
fn ignore_failing_files() {
|
||||
// 1.7.7 has an invalid requires-python field (double comma), 1.7.8 is valid
|
||||
let response = r#"
|
||||
{
|
||||
"files": [
|
||||
{
|
||||
"core-metadata": false,
|
||||
"data-dist-info-metadata": false,
|
||||
"filename": "pyflyby-1.7.7.tar.gz",
|
||||
"hashes": {
|
||||
"sha256": "0c4d953f405a7be1300b440dbdbc6917011a07d8401345a97e72cd410d5fb291"
|
||||
},
|
||||
"requires-python": ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*, !=3.2.*, !=3.3.*, !=3.4.*,, !=3.5.*, !=3.6.*, <4",
|
||||
"size": 427200,
|
||||
"upload-time": "2022-05-19T09:14:36.591835Z",
|
||||
"url": "https://files.pythonhosted.org/packages/61/93/9fec62902d0b4fc2521333eba047bff4adbba41f1723a6382367f84ee522/pyflyby-1.7.7.tar.gz",
|
||||
"yanked": false
|
||||
},
|
||||
{
|
||||
"core-metadata": false,
|
||||
"data-dist-info-metadata": false,
|
||||
"filename": "pyflyby-1.7.8.tar.gz",
|
||||
"hashes": {
|
||||
"sha256": "1ee37474f6da8f98653dbcc208793f50b7ace1d9066f49e2707750a5ba5d53c6"
|
||||
},
|
||||
"requires-python": ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*, <4",
|
||||
"size": 424460,
|
||||
"upload-time": "2022-08-04T10:42:02.190074Z",
|
||||
"url": "https://files.pythonhosted.org/packages/ad/39/17180d9806a1c50197bc63b25d0f1266f745fc3b23f11439fccb3d6baa50/pyflyby-1.7.8.tar.gz",
|
||||
"yanked": false
|
||||
}
|
||||
]
|
||||
}
|
||||
"#;
|
||||
let data: SimpleJson = serde_json::from_str(response).unwrap();
|
||||
let base = Url::parse("https://pypi.org/simple/pyflyby/").unwrap();
|
||||
let simple_metadata = SimpleMetadata::from_files(
|
||||
data.files,
|
||||
&PackageName::from_str("pyflyby").unwrap(),
|
||||
&base,
|
||||
);
|
||||
let versions: Vec<String> = simple_metadata
|
||||
.iter()
|
||||
.map(|SimpleMetadatum { version, .. }| version.to_string())
|
||||
.collect();
|
||||
assert_eq!(versions, ["1.7.8".to_string()]);
|
||||
}
|
||||
|
||||
/// Test for AWS Code Artifact registry
|
||||
///
|
||||
/// See: <https://github.com/astral-sh/uv/issues/1388>
|
||||
#[test]
|
||||
fn relative_urls_code_artifact() -> Result<(), JoinRelativeError> {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Links for flask</title>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Links for flask</h1>
|
||||
<a href="0.1/Flask-0.1.tar.gz#sha256=9da884457e910bf0847d396cb4b778ad9f3c3d17db1c5997cb861937bd284237" data-gpg-sig="false" >Flask-0.1.tar.gz</a>
|
||||
<br/>
|
||||
<a href="0.10.1/Flask-0.10.1.tar.gz#sha256=4c83829ff83d408b5e1d4995472265411d2c414112298f2eb4b359d9e4563373" data-gpg-sig="false" >Flask-0.10.1.tar.gz</a>
|
||||
<br/>
|
||||
<a href="3.0.1/flask-3.0.1.tar.gz#sha256=6489f51bb3666def6f314e15f19d50a1869a19ae0e8c9a3641ffe66c77d42403" data-requires-python=">=3.8" data-gpg-sig="false" >flask-3.0.1.tar.gz</a>
|
||||
<br/>
|
||||
</body>
|
||||
</html>
|
||||
"#;
|
||||
|
||||
// Note the lack of a trailing `/` here is important for coverage of url-join behavior
|
||||
let base = Url::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask")
|
||||
.unwrap();
|
||||
let SimpleHtml { base, files } = SimpleHtml::parse(text, &base).unwrap();
|
||||
|
||||
// Test parsing of the file urls
|
||||
let urls = files
|
||||
.iter()
|
||||
.map(|file| uv_pypi_types::base_url_join_relative(base.as_url().as_str(), &file.url))
|
||||
.collect::<Result<Vec<_>, JoinRelativeError>>()?;
|
||||
let urls = urls.iter().map(reqwest::Url::as_str).collect::<Vec<_>>();
|
||||
insta::assert_debug_snapshot!(urls, @r###"
|
||||
[
|
||||
"https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/0.1/Flask-0.1.tar.gz#sha256=9da884457e910bf0847d396cb4b778ad9f3c3d17db1c5997cb861937bd284237",
|
||||
"https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/0.10.1/Flask-0.10.1.tar.gz#sha256=4c83829ff83d408b5e1d4995472265411d2c414112298f2eb4b359d9e4563373",
|
||||
"https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/3.0.1/flask-3.0.1.tar.gz#sha256=6489f51bb3666def6f314e15f19d50a1869a19ae0e8c9a3641ffe66c77d42403",
|
||||
]
|
||||
"###);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
102
crates/uv-client/src/registry_client/tests.rs
Normal file
102
crates/uv-client/src/registry_client/tests.rs
Normal file
|
@ -0,0 +1,102 @@
|
|||
use std::str::FromStr;
|
||||
|
||||
use url::Url;
|
||||
|
||||
use uv_normalize::PackageName;
|
||||
use uv_pypi_types::{JoinRelativeError, SimpleJson};
|
||||
|
||||
use crate::{html::SimpleHtml, SimpleMetadata, SimpleMetadatum};
|
||||
|
||||
#[test]
|
||||
fn ignore_failing_files() {
|
||||
// 1.7.7 has an invalid requires-python field (double comma), 1.7.8 is valid
|
||||
let response = r#"
|
||||
{
|
||||
"files": [
|
||||
{
|
||||
"core-metadata": false,
|
||||
"data-dist-info-metadata": false,
|
||||
"filename": "pyflyby-1.7.7.tar.gz",
|
||||
"hashes": {
|
||||
"sha256": "0c4d953f405a7be1300b440dbdbc6917011a07d8401345a97e72cd410d5fb291"
|
||||
},
|
||||
"requires-python": ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*, !=3.2.*, !=3.3.*, !=3.4.*,, !=3.5.*, !=3.6.*, <4",
|
||||
"size": 427200,
|
||||
"upload-time": "2022-05-19T09:14:36.591835Z",
|
||||
"url": "https://files.pythonhosted.org/packages/61/93/9fec62902d0b4fc2521333eba047bff4adbba41f1723a6382367f84ee522/pyflyby-1.7.7.tar.gz",
|
||||
"yanked": false
|
||||
},
|
||||
{
|
||||
"core-metadata": false,
|
||||
"data-dist-info-metadata": false,
|
||||
"filename": "pyflyby-1.7.8.tar.gz",
|
||||
"hashes": {
|
||||
"sha256": "1ee37474f6da8f98653dbcc208793f50b7ace1d9066f49e2707750a5ba5d53c6"
|
||||
},
|
||||
"requires-python": ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*, <4",
|
||||
"size": 424460,
|
||||
"upload-time": "2022-08-04T10:42:02.190074Z",
|
||||
"url": "https://files.pythonhosted.org/packages/ad/39/17180d9806a1c50197bc63b25d0f1266f745fc3b23f11439fccb3d6baa50/pyflyby-1.7.8.tar.gz",
|
||||
"yanked": false
|
||||
}
|
||||
]
|
||||
}
|
||||
"#;
|
||||
let data: SimpleJson = serde_json::from_str(response).unwrap();
|
||||
let base = Url::parse("https://pypi.org/simple/pyflyby/").unwrap();
|
||||
let simple_metadata = SimpleMetadata::from_files(
|
||||
data.files,
|
||||
&PackageName::from_str("pyflyby").unwrap(),
|
||||
&base,
|
||||
);
|
||||
let versions: Vec<String> = simple_metadata
|
||||
.iter()
|
||||
.map(|SimpleMetadatum { version, .. }| version.to_string())
|
||||
.collect();
|
||||
assert_eq!(versions, ["1.7.8".to_string()]);
|
||||
}
|
||||
|
||||
/// Test for AWS Code Artifact registry
|
||||
///
|
||||
/// See: <https://github.com/astral-sh/uv/issues/1388>
|
||||
#[test]
|
||||
fn relative_urls_code_artifact() -> Result<(), JoinRelativeError> {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Links for flask</title>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Links for flask</h1>
|
||||
<a href="0.1/Flask-0.1.tar.gz#sha256=9da884457e910bf0847d396cb4b778ad9f3c3d17db1c5997cb861937bd284237" data-gpg-sig="false" >Flask-0.1.tar.gz</a>
|
||||
<br/>
|
||||
<a href="0.10.1/Flask-0.10.1.tar.gz#sha256=4c83829ff83d408b5e1d4995472265411d2c414112298f2eb4b359d9e4563373" data-gpg-sig="false" >Flask-0.10.1.tar.gz</a>
|
||||
<br/>
|
||||
<a href="3.0.1/flask-3.0.1.tar.gz#sha256=6489f51bb3666def6f314e15f19d50a1869a19ae0e8c9a3641ffe66c77d42403" data-requires-python=">=3.8" data-gpg-sig="false" >flask-3.0.1.tar.gz</a>
|
||||
<br/>
|
||||
</body>
|
||||
</html>
|
||||
"#;
|
||||
|
||||
// Note the lack of a trailing `/` here is important for coverage of url-join behavior
|
||||
let base = Url::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask")
|
||||
.unwrap();
|
||||
let SimpleHtml { base, files } = SimpleHtml::parse(text, &base).unwrap();
|
||||
|
||||
// Test parsing of the file urls
|
||||
let urls = files
|
||||
.iter()
|
||||
.map(|file| uv_pypi_types::base_url_join_relative(base.as_url().as_str(), &file.url))
|
||||
.collect::<Result<Vec<_>, JoinRelativeError>>()?;
|
||||
let urls = urls.iter().map(reqwest::Url::as_str).collect::<Vec<_>>();
|
||||
insta::assert_debug_snapshot!(urls, @r###"
|
||||
[
|
||||
"https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/0.1/Flask-0.1.tar.gz#sha256=9da884457e910bf0847d396cb4b778ad9f3c3d17db1c5997cb861937bd284237",
|
||||
"https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/0.10.1/Flask-0.10.1.tar.gz#sha256=4c83829ff83d408b5e1d4995472265411d2c414112298f2eb4b359d9e4563373",
|
||||
"https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/3.0.1/flask-3.0.1.tar.gz#sha256=6489f51bb3666def6f314e15f19d50a1869a19ae0e8c9a3641ffe66c77d42403",
|
||||
]
|
||||
"###);
|
||||
|
||||
Ok(())
|
||||
}
|
2
crates/uv-client/tests/it/main.rs
Normal file
2
crates/uv-client/tests/it/main.rs
Normal file
|
@ -0,0 +1,2 @@
|
|||
mod remote_metadata;
|
||||
mod user_agent_version;
|
|
@ -9,6 +9,9 @@ repository = { workspace = true }
|
|||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
|
@ -354,66 +354,4 @@ pub enum IndexStrategy {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::str::FromStr;
|
||||
|
||||
use anyhow::Error;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn no_build_from_args() -> Result<(), Error> {
|
||||
assert_eq!(
|
||||
NoBuild::from_pip_args(vec![PackageNameSpecifier::from_str(":all:")?], false),
|
||||
NoBuild::All,
|
||||
);
|
||||
assert_eq!(
|
||||
NoBuild::from_pip_args(vec![PackageNameSpecifier::from_str(":all:")?], true),
|
||||
NoBuild::All,
|
||||
);
|
||||
assert_eq!(
|
||||
NoBuild::from_pip_args(vec![PackageNameSpecifier::from_str(":none:")?], true),
|
||||
NoBuild::All,
|
||||
);
|
||||
assert_eq!(
|
||||
NoBuild::from_pip_args(vec![PackageNameSpecifier::from_str(":none:")?], false),
|
||||
NoBuild::None,
|
||||
);
|
||||
assert_eq!(
|
||||
NoBuild::from_pip_args(
|
||||
vec![
|
||||
PackageNameSpecifier::from_str("foo")?,
|
||||
PackageNameSpecifier::from_str("bar")?
|
||||
],
|
||||
false
|
||||
),
|
||||
NoBuild::Packages(vec![
|
||||
PackageName::from_str("foo")?,
|
||||
PackageName::from_str("bar")?
|
||||
]),
|
||||
);
|
||||
assert_eq!(
|
||||
NoBuild::from_pip_args(
|
||||
vec![
|
||||
PackageNameSpecifier::from_str("test")?,
|
||||
PackageNameSpecifier::All
|
||||
],
|
||||
false
|
||||
),
|
||||
NoBuild::All,
|
||||
);
|
||||
assert_eq!(
|
||||
NoBuild::from_pip_args(
|
||||
vec![
|
||||
PackageNameSpecifier::from_str("foo")?,
|
||||
PackageNameSpecifier::from_str(":none:")?,
|
||||
PackageNameSpecifier::from_str("bar")?
|
||||
],
|
||||
false
|
||||
),
|
||||
NoBuild::Packages(vec![PackageName::from_str("bar")?]),
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
61
crates/uv-configuration/src/build_options/tests.rs
Normal file
61
crates/uv-configuration/src/build_options/tests.rs
Normal file
|
@ -0,0 +1,61 @@
|
|||
use std::str::FromStr;
|
||||
|
||||
use anyhow::Error;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn no_build_from_args() -> Result<(), Error> {
|
||||
assert_eq!(
|
||||
NoBuild::from_pip_args(vec![PackageNameSpecifier::from_str(":all:")?], false),
|
||||
NoBuild::All,
|
||||
);
|
||||
assert_eq!(
|
||||
NoBuild::from_pip_args(vec![PackageNameSpecifier::from_str(":all:")?], true),
|
||||
NoBuild::All,
|
||||
);
|
||||
assert_eq!(
|
||||
NoBuild::from_pip_args(vec![PackageNameSpecifier::from_str(":none:")?], true),
|
||||
NoBuild::All,
|
||||
);
|
||||
assert_eq!(
|
||||
NoBuild::from_pip_args(vec![PackageNameSpecifier::from_str(":none:")?], false),
|
||||
NoBuild::None,
|
||||
);
|
||||
assert_eq!(
|
||||
NoBuild::from_pip_args(
|
||||
vec![
|
||||
PackageNameSpecifier::from_str("foo")?,
|
||||
PackageNameSpecifier::from_str("bar")?
|
||||
],
|
||||
false
|
||||
),
|
||||
NoBuild::Packages(vec![
|
||||
PackageName::from_str("foo")?,
|
||||
PackageName::from_str("bar")?
|
||||
]),
|
||||
);
|
||||
assert_eq!(
|
||||
NoBuild::from_pip_args(
|
||||
vec![
|
||||
PackageNameSpecifier::from_str("test")?,
|
||||
PackageNameSpecifier::All
|
||||
],
|
||||
false
|
||||
),
|
||||
NoBuild::All,
|
||||
);
|
||||
assert_eq!(
|
||||
NoBuild::from_pip_args(
|
||||
vec![
|
||||
PackageNameSpecifier::from_str("foo")?,
|
||||
PackageNameSpecifier::from_str(":none:")?,
|
||||
PackageNameSpecifier::from_str("bar")?
|
||||
],
|
||||
false
|
||||
),
|
||||
NoBuild::Packages(vec![PackageName::from_str("bar")?]),
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -213,82 +213,4 @@ impl<'de> serde::Deserialize<'de> for ConfigSettings {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn collect_config_settings() {
|
||||
let settings: ConfigSettings = vec![
|
||||
ConfigSettingEntry {
|
||||
key: "key".to_string(),
|
||||
value: "value".to_string(),
|
||||
},
|
||||
ConfigSettingEntry {
|
||||
key: "key".to_string(),
|
||||
value: "value2".to_string(),
|
||||
},
|
||||
ConfigSettingEntry {
|
||||
key: "list".to_string(),
|
||||
value: "value3".to_string(),
|
||||
},
|
||||
ConfigSettingEntry {
|
||||
key: "list".to_string(),
|
||||
value: "value4".to_string(),
|
||||
},
|
||||
]
|
||||
.into_iter()
|
||||
.collect();
|
||||
assert_eq!(
|
||||
settings.0.get("key"),
|
||||
Some(&ConfigSettingValue::List(vec![
|
||||
"value".to_string(),
|
||||
"value2".to_string()
|
||||
]))
|
||||
);
|
||||
assert_eq!(
|
||||
settings.0.get("list"),
|
||||
Some(&ConfigSettingValue::List(vec![
|
||||
"value3".to_string(),
|
||||
"value4".to_string()
|
||||
]))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn escape_for_python() {
|
||||
let mut settings = ConfigSettings::default();
|
||||
settings.0.insert(
|
||||
"key".to_string(),
|
||||
ConfigSettingValue::String("value".to_string()),
|
||||
);
|
||||
settings.0.insert(
|
||||
"list".to_string(),
|
||||
ConfigSettingValue::List(vec!["value1".to_string(), "value2".to_string()]),
|
||||
);
|
||||
assert_eq!(
|
||||
settings.escape_for_python(),
|
||||
r#"{"key":"value","list":["value1","value2"]}"#
|
||||
);
|
||||
|
||||
let mut settings = ConfigSettings::default();
|
||||
settings.0.insert(
|
||||
"key".to_string(),
|
||||
ConfigSettingValue::String("Hello, \"world!\"".to_string()),
|
||||
);
|
||||
settings.0.insert(
|
||||
"list".to_string(),
|
||||
ConfigSettingValue::List(vec!["'value1'".to_string()]),
|
||||
);
|
||||
assert_eq!(
|
||||
settings.escape_for_python(),
|
||||
r#"{"key":"Hello, \"world!\"","list":["'value1'"]}"#
|
||||
);
|
||||
|
||||
let mut settings = ConfigSettings::default();
|
||||
settings.0.insert(
|
||||
"key".to_string(),
|
||||
ConfigSettingValue::String("val\\1 {}value".to_string()),
|
||||
);
|
||||
assert_eq!(settings.escape_for_python(), r#"{"key":"val\\1 {}value"}"#);
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
77
crates/uv-configuration/src/config_settings/tests.rs
Normal file
77
crates/uv-configuration/src/config_settings/tests.rs
Normal file
|
@ -0,0 +1,77 @@
|
|||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn collect_config_settings() {
|
||||
let settings: ConfigSettings = vec![
|
||||
ConfigSettingEntry {
|
||||
key: "key".to_string(),
|
||||
value: "value".to_string(),
|
||||
},
|
||||
ConfigSettingEntry {
|
||||
key: "key".to_string(),
|
||||
value: "value2".to_string(),
|
||||
},
|
||||
ConfigSettingEntry {
|
||||
key: "list".to_string(),
|
||||
value: "value3".to_string(),
|
||||
},
|
||||
ConfigSettingEntry {
|
||||
key: "list".to_string(),
|
||||
value: "value4".to_string(),
|
||||
},
|
||||
]
|
||||
.into_iter()
|
||||
.collect();
|
||||
assert_eq!(
|
||||
settings.0.get("key"),
|
||||
Some(&ConfigSettingValue::List(vec![
|
||||
"value".to_string(),
|
||||
"value2".to_string()
|
||||
]))
|
||||
);
|
||||
assert_eq!(
|
||||
settings.0.get("list"),
|
||||
Some(&ConfigSettingValue::List(vec![
|
||||
"value3".to_string(),
|
||||
"value4".to_string()
|
||||
]))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn escape_for_python() {
|
||||
let mut settings = ConfigSettings::default();
|
||||
settings.0.insert(
|
||||
"key".to_string(),
|
||||
ConfigSettingValue::String("value".to_string()),
|
||||
);
|
||||
settings.0.insert(
|
||||
"list".to_string(),
|
||||
ConfigSettingValue::List(vec!["value1".to_string(), "value2".to_string()]),
|
||||
);
|
||||
assert_eq!(
|
||||
settings.escape_for_python(),
|
||||
r#"{"key":"value","list":["value1","value2"]}"#
|
||||
);
|
||||
|
||||
let mut settings = ConfigSettings::default();
|
||||
settings.0.insert(
|
||||
"key".to_string(),
|
||||
ConfigSettingValue::String("Hello, \"world!\"".to_string()),
|
||||
);
|
||||
settings.0.insert(
|
||||
"list".to_string(),
|
||||
ConfigSettingValue::List(vec!["'value1'".to_string()]),
|
||||
);
|
||||
assert_eq!(
|
||||
settings.escape_for_python(),
|
||||
r#"{"key":"Hello, \"world!\"","list":["'value1'"]}"#
|
||||
);
|
||||
|
||||
let mut settings = ConfigSettings::default();
|
||||
settings.0.insert(
|
||||
"key".to_string(),
|
||||
ConfigSettingValue::String("val\\1 {}value".to_string()),
|
||||
);
|
||||
assert_eq!(settings.escape_for_python(), r#"{"key":"val\\1 {}value"}"#);
|
||||
}
|
|
@ -145,45 +145,4 @@ impl schemars::JsonSchema for TrustedHost {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
#[test]
|
||||
fn parse() {
|
||||
assert_eq!(
|
||||
"example.com".parse::<super::TrustedHost>().unwrap(),
|
||||
super::TrustedHost {
|
||||
scheme: None,
|
||||
host: "example.com".to_string(),
|
||||
port: None
|
||||
}
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
"example.com:8080".parse::<super::TrustedHost>().unwrap(),
|
||||
super::TrustedHost {
|
||||
scheme: None,
|
||||
host: "example.com".to_string(),
|
||||
port: Some(8080)
|
||||
}
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
"https://example.com".parse::<super::TrustedHost>().unwrap(),
|
||||
super::TrustedHost {
|
||||
scheme: Some("https".to_string()),
|
||||
host: "example.com".to_string(),
|
||||
port: None
|
||||
}
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
"https://example.com/hello/world"
|
||||
.parse::<super::TrustedHost>()
|
||||
.unwrap(),
|
||||
super::TrustedHost {
|
||||
scheme: Some("https".to_string()),
|
||||
host: "example.com".to_string(),
|
||||
port: None
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
40
crates/uv-configuration/src/trusted_host/tests.rs
Normal file
40
crates/uv-configuration/src/trusted_host/tests.rs
Normal file
|
@ -0,0 +1,40 @@
|
|||
#[test]
|
||||
fn parse() {
|
||||
assert_eq!(
|
||||
"example.com".parse::<super::TrustedHost>().unwrap(),
|
||||
super::TrustedHost {
|
||||
scheme: None,
|
||||
host: "example.com".to_string(),
|
||||
port: None
|
||||
}
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
"example.com:8080".parse::<super::TrustedHost>().unwrap(),
|
||||
super::TrustedHost {
|
||||
scheme: None,
|
||||
host: "example.com".to_string(),
|
||||
port: Some(8080)
|
||||
}
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
"https://example.com".parse::<super::TrustedHost>().unwrap(),
|
||||
super::TrustedHost {
|
||||
scheme: Some("https".to_string()),
|
||||
host: "example.com".to_string(),
|
||||
port: None
|
||||
}
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
"https://example.com/hello/world"
|
||||
.parse::<super::TrustedHost>()
|
||||
.unwrap(),
|
||||
super::TrustedHost {
|
||||
scheme: Some("https".to_string()),
|
||||
host: "example.com".to_string(),
|
||||
port: None
|
||||
}
|
||||
);
|
||||
}
|
|
@ -4,6 +4,9 @@ version = "0.0.1"
|
|||
edition = "2021"
|
||||
description = "Utilities for interacting with the terminal"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
|
@ -324,22 +324,4 @@ fn emit_possible_options(opt: &clap::Arg, output: &mut String) {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::env;
|
||||
|
||||
use anyhow::Result;
|
||||
|
||||
use crate::generate_all::Mode;
|
||||
|
||||
use super::{main, Args};
|
||||
|
||||
#[test]
|
||||
fn test_generate_cli_reference() -> Result<()> {
|
||||
let mode = if env::var("UV_UPDATE_SCHEMA").as_deref() == Ok("1") {
|
||||
Mode::Write
|
||||
} else {
|
||||
Mode::Check
|
||||
};
|
||||
main(&Args { mode })
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
17
crates/uv-dev/src/generate_cli_reference/tests.rs
Normal file
17
crates/uv-dev/src/generate_cli_reference/tests.rs
Normal file
|
@ -0,0 +1,17 @@
|
|||
use std::env;
|
||||
|
||||
use anyhow::Result;
|
||||
|
||||
use crate::generate_all::Mode;
|
||||
|
||||
use super::{main, Args};
|
||||
|
||||
#[test]
|
||||
fn test_generate_cli_reference() -> Result<()> {
|
||||
let mode = if env::var("UV_UPDATE_SCHEMA").as_deref() == Ok("1") {
|
||||
Mode::Write
|
||||
} else {
|
||||
Mode::Check
|
||||
};
|
||||
main(&Args { mode })
|
||||
}
|
|
@ -81,22 +81,4 @@ pub(crate) fn main(args: &Args) -> Result<()> {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::env;
|
||||
|
||||
use anyhow::Result;
|
||||
|
||||
use crate::generate_all::Mode;
|
||||
|
||||
use super::{main, Args};
|
||||
|
||||
#[test]
|
||||
fn test_generate_json_schema() -> Result<()> {
|
||||
let mode = if env::var("UV_UPDATE_SCHEMA").as_deref() == Ok("1") {
|
||||
Mode::Write
|
||||
} else {
|
||||
Mode::Check
|
||||
};
|
||||
main(&Args { mode })
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
17
crates/uv-dev/src/generate_json_schema/tests.rs
Normal file
17
crates/uv-dev/src/generate_json_schema/tests.rs
Normal file
|
@ -0,0 +1,17 @@
|
|||
use std::env;
|
||||
|
||||
use anyhow::Result;
|
||||
|
||||
use crate::generate_all::Mode;
|
||||
|
||||
use super::{main, Args};
|
||||
|
||||
#[test]
|
||||
fn test_generate_json_schema() -> Result<()> {
|
||||
let mode = if env::var("UV_UPDATE_SCHEMA").as_deref() == Ok("1") {
|
||||
Mode::Write
|
||||
} else {
|
||||
Mode::Check
|
||||
};
|
||||
main(&Args { mode })
|
||||
}
|
|
@ -350,22 +350,4 @@ impl Visit for CollectOptionsVisitor {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::env;
|
||||
|
||||
use anyhow::Result;
|
||||
|
||||
use crate::generate_all::Mode;
|
||||
|
||||
use super::{main, Args};
|
||||
|
||||
#[test]
|
||||
fn test_generate_options_reference() -> Result<()> {
|
||||
let mode = if env::var("UV_UPDATE_SCHEMA").as_deref() == Ok("1") {
|
||||
Mode::Write
|
||||
} else {
|
||||
Mode::Check
|
||||
};
|
||||
main(&Args { mode })
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
17
crates/uv-dev/src/generate_options_reference/tests.rs
Normal file
17
crates/uv-dev/src/generate_options_reference/tests.rs
Normal file
|
@ -0,0 +1,17 @@
|
|||
use std::env;
|
||||
|
||||
use anyhow::Result;
|
||||
|
||||
use crate::generate_all::Mode;
|
||||
|
||||
use super::{main, Args};
|
||||
|
||||
#[test]
|
||||
fn test_generate_options_reference() -> Result<()> {
|
||||
let mode = if env::var("UV_UPDATE_SCHEMA").as_deref() == Ok("1") {
|
||||
Mode::Write
|
||||
} else {
|
||||
Mode::Check
|
||||
};
|
||||
main(&Args { mode })
|
||||
}
|
|
@ -10,6 +10,9 @@ repository = { workspace = true }
|
|||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
|
@ -9,6 +9,9 @@ repository = { workspace = true }
|
|||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
|
@ -80,38 +80,4 @@ impl FromStr for EggInfoFilename {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn egg_info_filename() {
|
||||
let filename = "zstandard-0.22.0-py3.12-darwin.egg-info";
|
||||
let parsed = EggInfoFilename::from_str(filename).unwrap();
|
||||
assert_eq!(parsed.name.as_ref(), "zstandard");
|
||||
assert_eq!(
|
||||
parsed.version.map(|v| v.to_string()),
|
||||
Some("0.22.0".to_string())
|
||||
);
|
||||
|
||||
let filename = "zstandard-0.22.0-py3.12.egg-info";
|
||||
let parsed = EggInfoFilename::from_str(filename).unwrap();
|
||||
assert_eq!(parsed.name.as_ref(), "zstandard");
|
||||
assert_eq!(
|
||||
parsed.version.map(|v| v.to_string()),
|
||||
Some("0.22.0".to_string())
|
||||
);
|
||||
|
||||
let filename = "zstandard-0.22.0.egg-info";
|
||||
let parsed = EggInfoFilename::from_str(filename).unwrap();
|
||||
assert_eq!(parsed.name.as_ref(), "zstandard");
|
||||
assert_eq!(
|
||||
parsed.version.map(|v| v.to_string()),
|
||||
Some("0.22.0".to_string())
|
||||
);
|
||||
|
||||
let filename = "zstandard.egg-info";
|
||||
let parsed = EggInfoFilename::from_str(filename).unwrap();
|
||||
assert_eq!(parsed.name.as_ref(), "zstandard");
|
||||
assert!(parsed.version.is_none());
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
33
crates/uv-distribution-filename/src/egg/tests.rs
Normal file
33
crates/uv-distribution-filename/src/egg/tests.rs
Normal file
|
@ -0,0 +1,33 @@
|
|||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn egg_info_filename() {
|
||||
let filename = "zstandard-0.22.0-py3.12-darwin.egg-info";
|
||||
let parsed = EggInfoFilename::from_str(filename).unwrap();
|
||||
assert_eq!(parsed.name.as_ref(), "zstandard");
|
||||
assert_eq!(
|
||||
parsed.version.map(|v| v.to_string()),
|
||||
Some("0.22.0".to_string())
|
||||
);
|
||||
|
||||
let filename = "zstandard-0.22.0-py3.12.egg-info";
|
||||
let parsed = EggInfoFilename::from_str(filename).unwrap();
|
||||
assert_eq!(parsed.name.as_ref(), "zstandard");
|
||||
assert_eq!(
|
||||
parsed.version.map(|v| v.to_string()),
|
||||
Some("0.22.0".to_string())
|
||||
);
|
||||
|
||||
let filename = "zstandard-0.22.0.egg-info";
|
||||
let parsed = EggInfoFilename::from_str(filename).unwrap();
|
||||
assert_eq!(parsed.name.as_ref(), "zstandard");
|
||||
assert_eq!(
|
||||
parsed.version.map(|v| v.to_string()),
|
||||
Some("0.22.0".to_string())
|
||||
);
|
||||
|
||||
let filename = "zstandard.egg-info";
|
||||
let parsed = EggInfoFilename::from_str(filename).unwrap();
|
||||
assert_eq!(parsed.name.as_ref(), "zstandard");
|
||||
assert!(parsed.version.is_none());
|
||||
}
|
|
@ -170,58 +170,4 @@ enum SourceDistFilenameErrorKind {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::str::FromStr;
|
||||
|
||||
use uv_normalize::PackageName;
|
||||
|
||||
use crate::{SourceDistExtension, SourceDistFilename};
|
||||
|
||||
/// Only test already normalized names since the parsing is lossy
|
||||
///
|
||||
/// <https://packaging.python.org/en/latest/specifications/source-distribution-format/#source-distribution-file-name>
|
||||
/// <https://packaging.python.org/en/latest/specifications/binary-distribution-format/#escaping-and-unicode>
|
||||
#[test]
|
||||
fn roundtrip() {
|
||||
for normalized in [
|
||||
"foo_lib-1.2.3.zip",
|
||||
"foo_lib-1.2.3a3.zip",
|
||||
"foo_lib-1.2.3.tar.gz",
|
||||
"foo_lib-1.2.3.tar.bz2",
|
||||
"foo_lib-1.2.3.tar.zst",
|
||||
] {
|
||||
let ext = SourceDistExtension::from_path(normalized).unwrap();
|
||||
assert_eq!(
|
||||
SourceDistFilename::parse(
|
||||
normalized,
|
||||
ext,
|
||||
&PackageName::from_str("foo_lib").unwrap()
|
||||
)
|
||||
.unwrap()
|
||||
.to_string(),
|
||||
normalized
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn errors() {
|
||||
for invalid in ["b-1.2.3.zip", "a-1.2.3-gamma.3.zip"] {
|
||||
let ext = SourceDistExtension::from_path(invalid).unwrap();
|
||||
assert!(
|
||||
SourceDistFilename::parse(invalid, ext, &PackageName::from_str("a").unwrap())
|
||||
.is_err()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn name_too_long() {
|
||||
assert!(SourceDistFilename::parse(
|
||||
"foo.zip",
|
||||
SourceDistExtension::Zip,
|
||||
&PackageName::from_str("foo-lib").unwrap()
|
||||
)
|
||||
.is_err());
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
48
crates/uv-distribution-filename/src/source_dist/tests.rs
Normal file
48
crates/uv-distribution-filename/src/source_dist/tests.rs
Normal file
|
@ -0,0 +1,48 @@
|
|||
use std::str::FromStr;
|
||||
|
||||
use uv_normalize::PackageName;
|
||||
|
||||
use crate::{SourceDistExtension, SourceDistFilename};
|
||||
|
||||
/// Only test already normalized names since the parsing is lossy
|
||||
///
|
||||
/// <https://packaging.python.org/en/latest/specifications/source-distribution-format/#source-distribution-file-name>
|
||||
/// <https://packaging.python.org/en/latest/specifications/binary-distribution-format/#escaping-and-unicode>
|
||||
#[test]
|
||||
fn roundtrip() {
|
||||
for normalized in [
|
||||
"foo_lib-1.2.3.zip",
|
||||
"foo_lib-1.2.3a3.zip",
|
||||
"foo_lib-1.2.3.tar.gz",
|
||||
"foo_lib-1.2.3.tar.bz2",
|
||||
"foo_lib-1.2.3.tar.zst",
|
||||
] {
|
||||
let ext = SourceDistExtension::from_path(normalized).unwrap();
|
||||
assert_eq!(
|
||||
SourceDistFilename::parse(normalized, ext, &PackageName::from_str("foo_lib").unwrap())
|
||||
.unwrap()
|
||||
.to_string(),
|
||||
normalized
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn errors() {
|
||||
for invalid in ["b-1.2.3.zip", "a-1.2.3-gamma.3.zip"] {
|
||||
let ext = SourceDistExtension::from_path(invalid).unwrap();
|
||||
assert!(
|
||||
SourceDistFilename::parse(invalid, ext, &PackageName::from_str("a").unwrap()).is_err()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn name_too_long() {
|
||||
assert!(SourceDistFilename::parse(
|
||||
"foo.zip",
|
||||
SourceDistExtension::Zip,
|
||||
&PackageName::from_str("foo-lib").unwrap()
|
||||
)
|
||||
.is_err());
|
||||
}
|
|
@ -234,101 +234,4 @@ pub enum WheelFilenameError {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn err_not_whl_extension() {
|
||||
let err = WheelFilename::from_str("foo.rs").unwrap_err();
|
||||
insta::assert_snapshot!(err, @r###"The wheel filename "foo.rs" is invalid: Must end with .whl"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_1_part_empty() {
|
||||
let err = WheelFilename::from_str(".whl").unwrap_err();
|
||||
insta::assert_snapshot!(err, @r###"The wheel filename ".whl" is invalid: Must have a version"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_1_part_no_version() {
|
||||
let err = WheelFilename::from_str("foo.whl").unwrap_err();
|
||||
insta::assert_snapshot!(err, @r###"The wheel filename "foo.whl" is invalid: Must have a version"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_2_part_no_pythontag() {
|
||||
let err = WheelFilename::from_str("foo-version.whl").unwrap_err();
|
||||
insta::assert_snapshot!(err, @r###"The wheel filename "foo-version.whl" is invalid: Must have a Python tag"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_3_part_no_abitag() {
|
||||
let err = WheelFilename::from_str("foo-version-python.whl").unwrap_err();
|
||||
insta::assert_snapshot!(err, @r###"The wheel filename "foo-version-python.whl" is invalid: Must have an ABI tag"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_4_part_no_platformtag() {
|
||||
let err = WheelFilename::from_str("foo-version-python-abi.whl").unwrap_err();
|
||||
insta::assert_snapshot!(err, @r###"The wheel filename "foo-version-python-abi.whl" is invalid: Must have a platform tag"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_too_many_parts() {
|
||||
let err =
|
||||
WheelFilename::from_str("foo-1.2.3-build-python-abi-platform-oops.whl").unwrap_err();
|
||||
insta::assert_snapshot!(err, @r###"The wheel filename "foo-1.2.3-build-python-abi-platform-oops.whl" is invalid: Must have 5 or 6 components, but has more"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_invalid_package_name() {
|
||||
let err = WheelFilename::from_str("f!oo-1.2.3-python-abi-platform.whl").unwrap_err();
|
||||
insta::assert_snapshot!(err, @r###"The wheel filename "f!oo-1.2.3-python-abi-platform.whl" has an invalid package name"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_invalid_version() {
|
||||
let err = WheelFilename::from_str("foo-x.y.z-python-abi-platform.whl").unwrap_err();
|
||||
insta::assert_snapshot!(err, @r###"The wheel filename "foo-x.y.z-python-abi-platform.whl" has an invalid version: expected version to start with a number, but no leading ASCII digits were found"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_invalid_build_tag() {
|
||||
let err = WheelFilename::from_str("foo-1.2.3-tag-python-abi-platform.whl").unwrap_err();
|
||||
insta::assert_snapshot!(err, @r###"The wheel filename "foo-1.2.3-tag-python-abi-platform.whl" has an invalid build tag: must start with a digit"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ok_single_tags() {
|
||||
insta::assert_debug_snapshot!(WheelFilename::from_str("foo-1.2.3-foo-bar-baz.whl"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ok_multiple_tags() {
|
||||
insta::assert_debug_snapshot!(WheelFilename::from_str(
|
||||
"foo-1.2.3-ab.cd.ef-gh-ij.kl.mn.op.qr.st.whl"
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ok_build_tag() {
|
||||
insta::assert_debug_snapshot!(WheelFilename::from_str(
|
||||
"foo-1.2.3-202206090410-python-abi-platform.whl"
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_and_to_string() {
|
||||
let wheel_names = &[
|
||||
"django_allauth-0.51.0-py3-none-any.whl",
|
||||
"osm2geojson-0.2.4-py3-none-any.whl",
|
||||
"numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
|
||||
];
|
||||
for wheel_name in wheel_names {
|
||||
assert_eq!(
|
||||
WheelFilename::from_str(wheel_name).unwrap().to_string(),
|
||||
*wheel_name
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
|
@ -0,0 +1,27 @@
|
|||
---
|
||||
source: crates/uv-distribution-filename/src/wheel/tests.rs
|
||||
expression: "WheelFilename::from_str(\"foo-1.2.3-202206090410-python-abi-platform.whl\")"
|
||||
---
|
||||
Ok(
|
||||
WheelFilename {
|
||||
name: PackageName(
|
||||
"foo",
|
||||
),
|
||||
version: "1.2.3",
|
||||
build_tag: Some(
|
||||
BuildTag(
|
||||
202206090410,
|
||||
None,
|
||||
),
|
||||
),
|
||||
python_tag: [
|
||||
"python",
|
||||
],
|
||||
abi_tag: [
|
||||
"abi",
|
||||
],
|
||||
platform_tag: [
|
||||
"platform",
|
||||
],
|
||||
},
|
||||
)
|
|
@ -0,0 +1,29 @@
|
|||
---
|
||||
source: crates/uv-distribution-filename/src/wheel/tests.rs
|
||||
expression: "WheelFilename::from_str(\"foo-1.2.3-ab.cd.ef-gh-ij.kl.mn.op.qr.st.whl\")"
|
||||
---
|
||||
Ok(
|
||||
WheelFilename {
|
||||
name: PackageName(
|
||||
"foo",
|
||||
),
|
||||
version: "1.2.3",
|
||||
build_tag: None,
|
||||
python_tag: [
|
||||
"ab",
|
||||
"cd",
|
||||
"ef",
|
||||
],
|
||||
abi_tag: [
|
||||
"gh",
|
||||
],
|
||||
platform_tag: [
|
||||
"ij",
|
||||
"kl",
|
||||
"mn",
|
||||
"op",
|
||||
"qr",
|
||||
"st",
|
||||
],
|
||||
},
|
||||
)
|
|
@ -0,0 +1,22 @@
|
|||
---
|
||||
source: crates/uv-distribution-filename/src/wheel/tests.rs
|
||||
expression: "WheelFilename::from_str(\"foo-1.2.3-foo-bar-baz.whl\")"
|
||||
---
|
||||
Ok(
|
||||
WheelFilename {
|
||||
name: PackageName(
|
||||
"foo",
|
||||
),
|
||||
version: "1.2.3",
|
||||
build_tag: None,
|
||||
python_tag: [
|
||||
"foo",
|
||||
],
|
||||
abi_tag: [
|
||||
"bar",
|
||||
],
|
||||
platform_tag: [
|
||||
"baz",
|
||||
],
|
||||
},
|
||||
)
|
95
crates/uv-distribution-filename/src/wheel/tests.rs
Normal file
95
crates/uv-distribution-filename/src/wheel/tests.rs
Normal file
|
@ -0,0 +1,95 @@
|
|||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn err_not_whl_extension() {
|
||||
let err = WheelFilename::from_str("foo.rs").unwrap_err();
|
||||
insta::assert_snapshot!(err, @r###"The wheel filename "foo.rs" is invalid: Must end with .whl"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_1_part_empty() {
|
||||
let err = WheelFilename::from_str(".whl").unwrap_err();
|
||||
insta::assert_snapshot!(err, @r###"The wheel filename ".whl" is invalid: Must have a version"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_1_part_no_version() {
|
||||
let err = WheelFilename::from_str("foo.whl").unwrap_err();
|
||||
insta::assert_snapshot!(err, @r###"The wheel filename "foo.whl" is invalid: Must have a version"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_2_part_no_pythontag() {
|
||||
let err = WheelFilename::from_str("foo-version.whl").unwrap_err();
|
||||
insta::assert_snapshot!(err, @r###"The wheel filename "foo-version.whl" is invalid: Must have a Python tag"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_3_part_no_abitag() {
|
||||
let err = WheelFilename::from_str("foo-version-python.whl").unwrap_err();
|
||||
insta::assert_snapshot!(err, @r###"The wheel filename "foo-version-python.whl" is invalid: Must have an ABI tag"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_4_part_no_platformtag() {
|
||||
let err = WheelFilename::from_str("foo-version-python-abi.whl").unwrap_err();
|
||||
insta::assert_snapshot!(err, @r###"The wheel filename "foo-version-python-abi.whl" is invalid: Must have a platform tag"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_too_many_parts() {
|
||||
let err = WheelFilename::from_str("foo-1.2.3-build-python-abi-platform-oops.whl").unwrap_err();
|
||||
insta::assert_snapshot!(err, @r###"The wheel filename "foo-1.2.3-build-python-abi-platform-oops.whl" is invalid: Must have 5 or 6 components, but has more"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_invalid_package_name() {
|
||||
let err = WheelFilename::from_str("f!oo-1.2.3-python-abi-platform.whl").unwrap_err();
|
||||
insta::assert_snapshot!(err, @r###"The wheel filename "f!oo-1.2.3-python-abi-platform.whl" has an invalid package name"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_invalid_version() {
|
||||
let err = WheelFilename::from_str("foo-x.y.z-python-abi-platform.whl").unwrap_err();
|
||||
insta::assert_snapshot!(err, @r###"The wheel filename "foo-x.y.z-python-abi-platform.whl" has an invalid version: expected version to start with a number, but no leading ASCII digits were found"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_invalid_build_tag() {
|
||||
let err = WheelFilename::from_str("foo-1.2.3-tag-python-abi-platform.whl").unwrap_err();
|
||||
insta::assert_snapshot!(err, @r###"The wheel filename "foo-1.2.3-tag-python-abi-platform.whl" has an invalid build tag: must start with a digit"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ok_single_tags() {
|
||||
insta::assert_debug_snapshot!(WheelFilename::from_str("foo-1.2.3-foo-bar-baz.whl"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ok_multiple_tags() {
|
||||
insta::assert_debug_snapshot!(WheelFilename::from_str(
|
||||
"foo-1.2.3-ab.cd.ef-gh-ij.kl.mn.op.qr.st.whl"
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ok_build_tag() {
|
||||
insta::assert_debug_snapshot!(WheelFilename::from_str(
|
||||
"foo-1.2.3-202206090410-python-abi-platform.whl"
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_and_to_string() {
|
||||
let wheel_names = &[
|
||||
"django_allauth-0.51.0-py3-none-any.whl",
|
||||
"osm2geojson-0.2.4-py3-none-any.whl",
|
||||
"numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
|
||||
];
|
||||
for wheel_name in wheel_names {
|
||||
assert_eq!(
|
||||
WheelFilename::from_str(wheel_name).unwrap().to_string(),
|
||||
*wheel_name
|
||||
);
|
||||
}
|
||||
}
|
|
@ -9,6 +9,9 @@ repository = { workspace = true }
|
|||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
|
@ -9,6 +9,9 @@ repository = { workspace = true }
|
|||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
|
@ -9,6 +9,9 @@ repository = { workspace = true }
|
|||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
|
@ -9,6 +9,9 @@ repository = { workspace = true }
|
|||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
|
@ -397,108 +397,4 @@ impl<'de> serde::de::Deserialize<'de> for PortablePathBuf {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_normalize_url() {
|
||||
if cfg!(windows) {
|
||||
assert_eq!(
|
||||
normalize_url_path("/C:/Users/ferris/wheel-0.42.0.tar.gz"),
|
||||
"C:\\Users\\ferris\\wheel-0.42.0.tar.gz"
|
||||
);
|
||||
} else {
|
||||
assert_eq!(
|
||||
normalize_url_path("/C:/Users/ferris/wheel-0.42.0.tar.gz"),
|
||||
"/C:/Users/ferris/wheel-0.42.0.tar.gz"
|
||||
);
|
||||
}
|
||||
|
||||
if cfg!(windows) {
|
||||
assert_eq!(
|
||||
normalize_url_path("./ferris/wheel-0.42.0.tar.gz"),
|
||||
".\\ferris\\wheel-0.42.0.tar.gz"
|
||||
);
|
||||
} else {
|
||||
assert_eq!(
|
||||
normalize_url_path("./ferris/wheel-0.42.0.tar.gz"),
|
||||
"./ferris/wheel-0.42.0.tar.gz"
|
||||
);
|
||||
}
|
||||
|
||||
if cfg!(windows) {
|
||||
assert_eq!(
|
||||
normalize_url_path("./wheel%20cache/wheel-0.42.0.tar.gz"),
|
||||
".\\wheel cache\\wheel-0.42.0.tar.gz"
|
||||
);
|
||||
} else {
|
||||
assert_eq!(
|
||||
normalize_url_path("./wheel%20cache/wheel-0.42.0.tar.gz"),
|
||||
"./wheel cache/wheel-0.42.0.tar.gz"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_normalize_path() {
|
||||
let path = Path::new("/a/b/../c/./d");
|
||||
let normalized = normalize_absolute_path(path).unwrap();
|
||||
assert_eq!(normalized, Path::new("/a/c/d"));
|
||||
|
||||
let path = Path::new("/a/../c/./d");
|
||||
let normalized = normalize_absolute_path(path).unwrap();
|
||||
assert_eq!(normalized, Path::new("/c/d"));
|
||||
|
||||
// This should be an error.
|
||||
let path = Path::new("/a/../../c/./d");
|
||||
let err = normalize_absolute_path(path).unwrap_err();
|
||||
assert_eq!(err.kind(), std::io::ErrorKind::InvalidInput);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_relative_to() {
|
||||
assert_eq!(
|
||||
relative_to(
|
||||
Path::new("/home/ferris/carcinization/lib/python/site-packages/foo/__init__.py"),
|
||||
Path::new("/home/ferris/carcinization/lib/python/site-packages"),
|
||||
)
|
||||
.unwrap(),
|
||||
Path::new("foo/__init__.py")
|
||||
);
|
||||
assert_eq!(
|
||||
relative_to(
|
||||
Path::new("/home/ferris/carcinization/lib/marker.txt"),
|
||||
Path::new("/home/ferris/carcinization/lib/python/site-packages"),
|
||||
)
|
||||
.unwrap(),
|
||||
Path::new("../../marker.txt")
|
||||
);
|
||||
assert_eq!(
|
||||
relative_to(
|
||||
Path::new("/home/ferris/carcinization/bin/foo_launcher"),
|
||||
Path::new("/home/ferris/carcinization/lib/python/site-packages"),
|
||||
)
|
||||
.unwrap(),
|
||||
Path::new("../../../bin/foo_launcher")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_normalize_relative() {
|
||||
let cases = [
|
||||
(
|
||||
"../../workspace-git-path-dep-test/packages/c/../../packages/d",
|
||||
"../../workspace-git-path-dep-test/packages/d",
|
||||
),
|
||||
(
|
||||
"workspace-git-path-dep-test/packages/c/../../packages/d",
|
||||
"workspace-git-path-dep-test/packages/d",
|
||||
),
|
||||
("./a/../../b", "../b"),
|
||||
("/usr/../../foo", "/../foo"),
|
||||
];
|
||||
for (input, expected) in cases {
|
||||
assert_eq!(normalize_path(Path::new(input)), Path::new(expected));
|
||||
}
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
103
crates/uv-fs/src/path/tests.rs
Normal file
103
crates/uv-fs/src/path/tests.rs
Normal file
|
@ -0,0 +1,103 @@
|
|||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_normalize_url() {
|
||||
if cfg!(windows) {
|
||||
assert_eq!(
|
||||
normalize_url_path("/C:/Users/ferris/wheel-0.42.0.tar.gz"),
|
||||
"C:\\Users\\ferris\\wheel-0.42.0.tar.gz"
|
||||
);
|
||||
} else {
|
||||
assert_eq!(
|
||||
normalize_url_path("/C:/Users/ferris/wheel-0.42.0.tar.gz"),
|
||||
"/C:/Users/ferris/wheel-0.42.0.tar.gz"
|
||||
);
|
||||
}
|
||||
|
||||
if cfg!(windows) {
|
||||
assert_eq!(
|
||||
normalize_url_path("./ferris/wheel-0.42.0.tar.gz"),
|
||||
".\\ferris\\wheel-0.42.0.tar.gz"
|
||||
);
|
||||
} else {
|
||||
assert_eq!(
|
||||
normalize_url_path("./ferris/wheel-0.42.0.tar.gz"),
|
||||
"./ferris/wheel-0.42.0.tar.gz"
|
||||
);
|
||||
}
|
||||
|
||||
if cfg!(windows) {
|
||||
assert_eq!(
|
||||
normalize_url_path("./wheel%20cache/wheel-0.42.0.tar.gz"),
|
||||
".\\wheel cache\\wheel-0.42.0.tar.gz"
|
||||
);
|
||||
} else {
|
||||
assert_eq!(
|
||||
normalize_url_path("./wheel%20cache/wheel-0.42.0.tar.gz"),
|
||||
"./wheel cache/wheel-0.42.0.tar.gz"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_normalize_path() {
|
||||
let path = Path::new("/a/b/../c/./d");
|
||||
let normalized = normalize_absolute_path(path).unwrap();
|
||||
assert_eq!(normalized, Path::new("/a/c/d"));
|
||||
|
||||
let path = Path::new("/a/../c/./d");
|
||||
let normalized = normalize_absolute_path(path).unwrap();
|
||||
assert_eq!(normalized, Path::new("/c/d"));
|
||||
|
||||
// This should be an error.
|
||||
let path = Path::new("/a/../../c/./d");
|
||||
let err = normalize_absolute_path(path).unwrap_err();
|
||||
assert_eq!(err.kind(), std::io::ErrorKind::InvalidInput);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_relative_to() {
|
||||
assert_eq!(
|
||||
relative_to(
|
||||
Path::new("/home/ferris/carcinization/lib/python/site-packages/foo/__init__.py"),
|
||||
Path::new("/home/ferris/carcinization/lib/python/site-packages"),
|
||||
)
|
||||
.unwrap(),
|
||||
Path::new("foo/__init__.py")
|
||||
);
|
||||
assert_eq!(
|
||||
relative_to(
|
||||
Path::new("/home/ferris/carcinization/lib/marker.txt"),
|
||||
Path::new("/home/ferris/carcinization/lib/python/site-packages"),
|
||||
)
|
||||
.unwrap(),
|
||||
Path::new("../../marker.txt")
|
||||
);
|
||||
assert_eq!(
|
||||
relative_to(
|
||||
Path::new("/home/ferris/carcinization/bin/foo_launcher"),
|
||||
Path::new("/home/ferris/carcinization/lib/python/site-packages"),
|
||||
)
|
||||
.unwrap(),
|
||||
Path::new("../../../bin/foo_launcher")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_normalize_relative() {
|
||||
let cases = [
|
||||
(
|
||||
"../../workspace-git-path-dep-test/packages/c/../../packages/d",
|
||||
"../../workspace-git-path-dep-test/packages/d",
|
||||
),
|
||||
(
|
||||
"workspace-git-path-dep-test/packages/c/../../packages/d",
|
||||
"workspace-git-path-dep-test/packages/d",
|
||||
),
|
||||
("./a/../../b", "../b"),
|
||||
("/usr/../../foo", "/../foo"),
|
||||
];
|
||||
for (input, expected) in cases {
|
||||
assert_eq!(normalize_path(Path::new(input)), Path::new(expected));
|
||||
}
|
||||
}
|
|
@ -9,6 +9,9 @@ repository = { workspace = true }
|
|||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
|
@ -112,19 +112,4 @@ impl Display for GitOid {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::str::FromStr;
|
||||
|
||||
use super::{GitOid, OidParseError};
|
||||
|
||||
#[test]
|
||||
fn git_oid() {
|
||||
GitOid::from_str("4a23745badf5bf5ef7928f1e346e9986bd696d82").unwrap();
|
||||
|
||||
assert_eq!(GitOid::from_str(""), Err(OidParseError::Empty));
|
||||
assert_eq!(
|
||||
GitOid::from_str(&str::repeat("a", 41)),
|
||||
Err(OidParseError::TooLong)
|
||||
);
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
14
crates/uv-git/src/sha/tests.rs
Normal file
14
crates/uv-git/src/sha/tests.rs
Normal file
|
@ -0,0 +1,14 @@
|
|||
use std::str::FromStr;
|
||||
|
||||
use super::{GitOid, OidParseError};
|
||||
|
||||
#[test]
|
||||
fn git_oid() {
|
||||
GitOid::from_str("4a23745badf5bf5ef7928f1e346e9986bd696d82").unwrap();
|
||||
|
||||
assert_eq!(GitOid::from_str(""), Err(OidParseError::Empty));
|
||||
assert_eq!(
|
||||
GitOid::from_str(&str::repeat("a", 41)),
|
||||
Err(OidParseError::TooLong)
|
||||
);
|
||||
}
|
|
@ -17,6 +17,7 @@ license = { workspace = true }
|
|||
workspace = true
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
name = "uv_install_wheel"
|
||||
|
||||
[dependencies]
|
||||
|
|
|
@ -9,6 +9,9 @@ repository = { workspace = true }
|
|||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
|
@ -5,6 +5,7 @@ edition = "2021"
|
|||
|
||||
[lib]
|
||||
proc-macro = true
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
|
|
@ -9,6 +9,9 @@ repository.workspace = true
|
|||
authors.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
uv-distribution-filename = { workspace = true }
|
||||
uv-normalize = { workspace = true }
|
||||
|
|
|
@ -4,6 +4,9 @@ version = "0.0.1"
|
|||
edition = "2021"
|
||||
description = "Normalization for distribution, package and extra names."
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
|
@ -86,23 +86,4 @@ impl AsRef<str> for DistInfoName<'_> {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn normalize() {
|
||||
let inputs = [
|
||||
"friendly-bard",
|
||||
"Friendly-Bard",
|
||||
"FRIENDLY-BARD",
|
||||
"friendly.bard",
|
||||
"friendly_bard",
|
||||
"friendly--bard",
|
||||
"friendly-.bard",
|
||||
"FrIeNdLy-._.-bArD",
|
||||
];
|
||||
for input in inputs {
|
||||
assert_eq!(DistInfoName::normalize(input), "friendly-bard");
|
||||
}
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
18
crates/uv-normalize/src/dist_info_name/tests.rs
Normal file
18
crates/uv-normalize/src/dist_info_name/tests.rs
Normal file
|
@ -0,0 +1,18 @@
|
|||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn normalize() {
|
||||
let inputs = [
|
||||
"friendly-bard",
|
||||
"Friendly-Bard",
|
||||
"FRIENDLY-BARD",
|
||||
"friendly.bard",
|
||||
"friendly_bard",
|
||||
"friendly--bard",
|
||||
"friendly-.bard",
|
||||
"FrIeNdLy-._.-bArD",
|
||||
];
|
||||
for input in inputs {
|
||||
assert_eq!(DistInfoName::normalize(input), "friendly-bard");
|
||||
}
|
||||
}
|
|
@ -120,79 +120,4 @@ impl Display for InvalidNameError {
|
|||
impl Error for InvalidNameError {}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn normalize() {
|
||||
let inputs = [
|
||||
"friendly-bard",
|
||||
"Friendly-Bard",
|
||||
"FRIENDLY-BARD",
|
||||
"friendly.bard",
|
||||
"friendly_bard",
|
||||
"friendly--bard",
|
||||
"friendly-.bard",
|
||||
"FrIeNdLy-._.-bArD",
|
||||
];
|
||||
for input in inputs {
|
||||
assert_eq!(validate_and_normalize_ref(input).unwrap(), "friendly-bard");
|
||||
assert_eq!(
|
||||
validate_and_normalize_owned(input.to_string()).unwrap(),
|
||||
"friendly-bard"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check() {
|
||||
let inputs = ["friendly-bard", "friendlybard"];
|
||||
for input in inputs {
|
||||
assert!(is_normalized(input).unwrap(), "{input:?}");
|
||||
}
|
||||
|
||||
let inputs = [
|
||||
"friendly.bard",
|
||||
"friendly.BARD",
|
||||
"friendly_bard",
|
||||
"friendly--bard",
|
||||
"friendly-.bard",
|
||||
"FrIeNdLy-._.-bArD",
|
||||
];
|
||||
for input in inputs {
|
||||
assert!(!is_normalized(input).unwrap(), "{input:?}");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unchanged() {
|
||||
// Unchanged
|
||||
let unchanged = ["friendly-bard", "1okay", "okay2"];
|
||||
for input in unchanged {
|
||||
assert_eq!(validate_and_normalize_ref(input).unwrap(), input);
|
||||
assert_eq!(
|
||||
validate_and_normalize_owned(input.to_string()).unwrap(),
|
||||
input
|
||||
);
|
||||
assert!(is_normalized(input).unwrap());
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn failures() {
|
||||
let failures = [
|
||||
" starts-with-space",
|
||||
"-starts-with-dash",
|
||||
"ends-with-dash-",
|
||||
"ends-with-space ",
|
||||
"includes!invalid-char",
|
||||
"space in middle",
|
||||
"alpha-α",
|
||||
];
|
||||
for input in failures {
|
||||
assert!(validate_and_normalize_ref(input).is_err());
|
||||
assert!(validate_and_normalize_owned(input.to_string()).is_err());
|
||||
assert!(is_normalized(input).is_err());
|
||||
}
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
74
crates/uv-normalize/src/tests.rs
Normal file
74
crates/uv-normalize/src/tests.rs
Normal file
|
@ -0,0 +1,74 @@
|
|||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn normalize() {
|
||||
let inputs = [
|
||||
"friendly-bard",
|
||||
"Friendly-Bard",
|
||||
"FRIENDLY-BARD",
|
||||
"friendly.bard",
|
||||
"friendly_bard",
|
||||
"friendly--bard",
|
||||
"friendly-.bard",
|
||||
"FrIeNdLy-._.-bArD",
|
||||
];
|
||||
for input in inputs {
|
||||
assert_eq!(validate_and_normalize_ref(input).unwrap(), "friendly-bard");
|
||||
assert_eq!(
|
||||
validate_and_normalize_owned(input.to_string()).unwrap(),
|
||||
"friendly-bard"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check() {
|
||||
let inputs = ["friendly-bard", "friendlybard"];
|
||||
for input in inputs {
|
||||
assert!(is_normalized(input).unwrap(), "{input:?}");
|
||||
}
|
||||
|
||||
let inputs = [
|
||||
"friendly.bard",
|
||||
"friendly.BARD",
|
||||
"friendly_bard",
|
||||
"friendly--bard",
|
||||
"friendly-.bard",
|
||||
"FrIeNdLy-._.-bArD",
|
||||
];
|
||||
for input in inputs {
|
||||
assert!(!is_normalized(input).unwrap(), "{input:?}");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unchanged() {
|
||||
// Unchanged
|
||||
let unchanged = ["friendly-bard", "1okay", "okay2"];
|
||||
for input in unchanged {
|
||||
assert_eq!(validate_and_normalize_ref(input).unwrap(), input);
|
||||
assert_eq!(
|
||||
validate_and_normalize_owned(input.to_string()).unwrap(),
|
||||
input
|
||||
);
|
||||
assert!(is_normalized(input).unwrap());
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn failures() {
|
||||
let failures = [
|
||||
" starts-with-space",
|
||||
"-starts-with-dash",
|
||||
"ends-with-dash-",
|
||||
"ends-with-space ",
|
||||
"includes!invalid-char",
|
||||
"space in middle",
|
||||
"alpha-α",
|
||||
];
|
||||
for input in failures {
|
||||
assert!(validate_and_normalize_ref(input).is_err());
|
||||
assert!(validate_and_normalize_owned(input.to_string()).is_err());
|
||||
assert!(is_normalized(input).is_err());
|
||||
}
|
||||
}
|
|
@ -9,6 +9,9 @@ repository = { workspace = true }
|
|||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
|
@ -9,6 +9,9 @@ repository = { workspace = true }
|
|||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
|
@ -100,76 +100,6 @@ impl OptionSet {
|
|||
/// Returns `true` if this set has an option that resolves to `name`.
|
||||
///
|
||||
/// The name can be separated by `.` to find a nested option.
|
||||
///
|
||||
/// ## Examples
|
||||
///
|
||||
/// ### Test for the existence of a child option
|
||||
///
|
||||
/// ```rust
|
||||
/// # use uv_options_metadata::{OptionField, OptionsMetadata, Visit};
|
||||
///
|
||||
/// struct WithOptions;
|
||||
///
|
||||
/// impl OptionsMetadata for WithOptions {
|
||||
/// fn record(visit: &mut dyn Visit) {
|
||||
/// visit.record_field("ignore-git-ignore", OptionField {
|
||||
/// doc: "Whether Ruff should respect the gitignore file",
|
||||
/// default: "false",
|
||||
/// value_type: "bool",
|
||||
/// example: "",
|
||||
/// scope: None,
|
||||
/// deprecated: None,
|
||||
/// possible_values: None
|
||||
/// });
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// assert!(WithOptions::metadata().has("ignore-git-ignore"));
|
||||
/// assert!(!WithOptions::metadata().has("does-not-exist"));
|
||||
/// ```
|
||||
/// ### Test for the existence of a nested option
|
||||
///
|
||||
/// ```rust
|
||||
/// # use uv_options_metadata::{OptionField, OptionsMetadata, Visit};
|
||||
///
|
||||
/// struct Root;
|
||||
///
|
||||
/// impl OptionsMetadata for Root {
|
||||
/// fn record(visit: &mut dyn Visit) {
|
||||
/// visit.record_field("ignore-git-ignore", OptionField {
|
||||
/// doc: "Whether Ruff should respect the gitignore file",
|
||||
/// default: "false",
|
||||
/// value_type: "bool",
|
||||
/// example: "",
|
||||
/// scope: None,
|
||||
/// deprecated: None,
|
||||
/// possible_values: None
|
||||
/// });
|
||||
///
|
||||
/// visit.record_set("format", Nested::metadata());
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// struct Nested;
|
||||
///
|
||||
/// impl OptionsMetadata for Nested {
|
||||
/// fn record(visit: &mut dyn Visit) {
|
||||
/// visit.record_field("hard-tabs", OptionField {
|
||||
/// doc: "Use hard tabs for indentation and spaces for alignment.",
|
||||
/// default: "false",
|
||||
/// value_type: "bool",
|
||||
/// example: "",
|
||||
/// scope: None,
|
||||
/// deprecated: None,
|
||||
/// possible_values: None
|
||||
/// });
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// assert!(Root::metadata().has("format.hard-tabs"));
|
||||
/// assert!(!Root::metadata().has("format.spaces"));
|
||||
/// assert!(!Root::metadata().has("lint.hard-tabs"));
|
||||
/// ```
|
||||
pub fn has(&self, name: &str) -> bool {
|
||||
self.find(name).is_some()
|
||||
}
|
||||
|
@ -177,81 +107,6 @@ impl OptionSet {
|
|||
/// Returns `Some` if this set has an option that resolves to `name` and `None` otherwise.
|
||||
///
|
||||
/// The name can be separated by `.` to find a nested option.
|
||||
///
|
||||
/// ## Examples
|
||||
///
|
||||
/// ### Find a child option
|
||||
///
|
||||
/// ```rust
|
||||
/// # use uv_options_metadata::{OptionEntry, OptionField, OptionsMetadata, Visit};
|
||||
///
|
||||
/// struct WithOptions;
|
||||
///
|
||||
/// static IGNORE_GIT_IGNORE: OptionField = OptionField {
|
||||
/// doc: "Whether Ruff should respect the gitignore file",
|
||||
/// default: "false",
|
||||
/// value_type: "bool",
|
||||
/// example: "",
|
||||
/// scope: None,
|
||||
/// deprecated: None,
|
||||
/// possible_values: None
|
||||
/// };
|
||||
///
|
||||
/// impl OptionsMetadata for WithOptions {
|
||||
/// fn record(visit: &mut dyn Visit) {
|
||||
/// visit.record_field("ignore-git-ignore", IGNORE_GIT_IGNORE.clone());
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// assert_eq!(WithOptions::metadata().find("ignore-git-ignore"), Some(OptionEntry::Field(IGNORE_GIT_IGNORE.clone())));
|
||||
/// assert_eq!(WithOptions::metadata().find("does-not-exist"), None);
|
||||
/// ```
|
||||
/// ### Find a nested option
|
||||
///
|
||||
/// ```rust
|
||||
/// # use uv_options_metadata::{OptionEntry, OptionField, OptionsMetadata, Visit};
|
||||
///
|
||||
/// static HARD_TABS: OptionField = OptionField {
|
||||
/// doc: "Use hard tabs for indentation and spaces for alignment.",
|
||||
/// default: "false",
|
||||
/// value_type: "bool",
|
||||
/// example: "",
|
||||
/// scope: None,
|
||||
/// deprecated: None,
|
||||
/// possible_values: None
|
||||
/// };
|
||||
///
|
||||
/// struct Root;
|
||||
///
|
||||
/// impl OptionsMetadata for Root {
|
||||
/// fn record(visit: &mut dyn Visit) {
|
||||
/// visit.record_field("ignore-git-ignore", OptionField {
|
||||
/// doc: "Whether Ruff should respect the gitignore file",
|
||||
/// default: "false",
|
||||
/// value_type: "bool",
|
||||
/// example: "",
|
||||
/// scope: None,
|
||||
/// deprecated: None,
|
||||
/// possible_values: None
|
||||
/// });
|
||||
///
|
||||
/// visit.record_set("format", Nested::metadata());
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// struct Nested;
|
||||
///
|
||||
/// impl OptionsMetadata for Nested {
|
||||
/// fn record(visit: &mut dyn Visit) {
|
||||
/// visit.record_field("hard-tabs", HARD_TABS.clone());
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// assert_eq!(Root::metadata().find("format.hard-tabs"), Some(OptionEntry::Field(HARD_TABS.clone())));
|
||||
/// assert_eq!(Root::metadata().find("format"), Some(OptionEntry::Set(Nested::metadata())));
|
||||
/// assert_eq!(Root::metadata().find("format.spaces"), None);
|
||||
/// assert_eq!(Root::metadata().find("lint.hard-tabs"), None);
|
||||
/// ```
|
||||
pub fn find(&self, name: &str) -> Option<OptionEntry> {
|
||||
struct FindOptionVisitor<'a> {
|
||||
option: Option<OptionEntry>,
|
||||
|
@ -459,3 +314,6 @@ impl Display for PossibleValue {
|
|||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
|
153
crates/uv-options-metadata/src/tests.rs
Normal file
153
crates/uv-options-metadata/src/tests.rs
Normal file
|
@ -0,0 +1,153 @@
|
|||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_has_child_option() {
|
||||
struct WithOptions;
|
||||
|
||||
impl OptionsMetadata for WithOptions {
|
||||
fn record(visit: &mut dyn Visit) {
|
||||
visit.record_field(
|
||||
"ignore-git-ignore",
|
||||
OptionField {
|
||||
doc: "Whether Ruff should respect the gitignore file",
|
||||
default: "false",
|
||||
value_type: "bool",
|
||||
example: "",
|
||||
scope: None,
|
||||
deprecated: None,
|
||||
possible_values: None,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
assert!(WithOptions::metadata().has("ignore-git-ignore"));
|
||||
assert!(!WithOptions::metadata().has("does-not-exist"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_has_nested_option() {
|
||||
struct Root;
|
||||
|
||||
impl OptionsMetadata for Root {
|
||||
fn record(visit: &mut dyn Visit) {
|
||||
visit.record_field(
|
||||
"ignore-git-ignore",
|
||||
OptionField {
|
||||
doc: "Whether Ruff should respect the gitignore file",
|
||||
default: "false",
|
||||
value_type: "bool",
|
||||
example: "",
|
||||
scope: None,
|
||||
deprecated: None,
|
||||
possible_values: None,
|
||||
},
|
||||
);
|
||||
|
||||
visit.record_set("format", Nested::metadata());
|
||||
}
|
||||
}
|
||||
|
||||
struct Nested;
|
||||
|
||||
impl OptionsMetadata for Nested {
|
||||
fn record(visit: &mut dyn Visit) {
|
||||
visit.record_field(
|
||||
"hard-tabs",
|
||||
OptionField {
|
||||
doc: "Use hard tabs for indentation and spaces for alignment.",
|
||||
default: "false",
|
||||
value_type: "bool",
|
||||
example: "",
|
||||
scope: None,
|
||||
deprecated: None,
|
||||
possible_values: None,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
assert!(Root::metadata().has("format.hard-tabs"));
|
||||
assert!(!Root::metadata().has("format.spaces"));
|
||||
assert!(!Root::metadata().has("lint.hard-tabs"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_find_child_option() {
|
||||
struct WithOptions;
|
||||
|
||||
static IGNORE_GIT_IGNORE: OptionField = OptionField {
|
||||
doc: "Whether Ruff should respect the gitignore file",
|
||||
default: "false",
|
||||
value_type: "bool",
|
||||
example: "",
|
||||
scope: None,
|
||||
deprecated: None,
|
||||
possible_values: None,
|
||||
};
|
||||
|
||||
impl OptionsMetadata for WithOptions {
|
||||
fn record(visit: &mut dyn Visit) {
|
||||
visit.record_field("ignore-git-ignore", IGNORE_GIT_IGNORE.clone());
|
||||
}
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
WithOptions::metadata().find("ignore-git-ignore"),
|
||||
Some(OptionEntry::Field(IGNORE_GIT_IGNORE.clone()))
|
||||
);
|
||||
assert_eq!(WithOptions::metadata().find("does-not-exist"), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_find_nested_option() {
|
||||
static HARD_TABS: OptionField = OptionField {
|
||||
doc: "Use hard tabs for indentation and spaces for alignment.",
|
||||
default: "false",
|
||||
value_type: "bool",
|
||||
example: "",
|
||||
scope: None,
|
||||
deprecated: None,
|
||||
possible_values: None,
|
||||
};
|
||||
|
||||
struct Root;
|
||||
|
||||
impl OptionsMetadata for Root {
|
||||
fn record(visit: &mut dyn Visit) {
|
||||
visit.record_field(
|
||||
"ignore-git-ignore",
|
||||
OptionField {
|
||||
doc: "Whether Ruff should respect the gitignore file",
|
||||
default: "false",
|
||||
value_type: "bool",
|
||||
example: "",
|
||||
scope: None,
|
||||
deprecated: None,
|
||||
possible_values: None,
|
||||
},
|
||||
);
|
||||
|
||||
visit.record_set("format", Nested::metadata());
|
||||
}
|
||||
}
|
||||
|
||||
struct Nested;
|
||||
|
||||
impl OptionsMetadata for Nested {
|
||||
fn record(visit: &mut dyn Visit) {
|
||||
visit.record_field("hard-tabs", HARD_TABS.clone());
|
||||
}
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
Root::metadata().find("format.hard-tabs"),
|
||||
Some(OptionEntry::Field(HARD_TABS.clone()))
|
||||
);
|
||||
assert_eq!(
|
||||
Root::metadata().find("format"),
|
||||
Some(OptionEntry::Set(Nested::metadata()))
|
||||
);
|
||||
assert_eq!(Root::metadata().find("format.spaces"), None);
|
||||
assert_eq!(Root::metadata().find("lint.hard-tabs"), None);
|
||||
}
|
|
@ -14,6 +14,7 @@ authors = { workspace = true }
|
|||
[lib]
|
||||
name = "uv_pep440"
|
||||
crate-type = ["rlib", "cdylib"]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
@ -27,6 +28,7 @@ unscanny = { workspace = true }
|
|||
|
||||
[dev-dependencies]
|
||||
indoc = { version = "2.0.5" }
|
||||
tracing = { workspace = true }
|
||||
|
||||
[features]
|
||||
# Match the API of the published crate, for compatibility.
|
||||
|
|
|
@ -1,17 +1,6 @@
|
|||
//! A library for python version numbers and specifiers, implementing
|
||||
//! [PEP 440](https://peps.python.org/pep-0440)
|
||||
//!
|
||||
//! ```rust
|
||||
//! use std::str::FromStr;
|
||||
//! use uv_pep440::{VersionSpecifiers, Version, VersionSpecifier};
|
||||
//!
|
||||
//! let version = Version::from_str("1.19").unwrap();
|
||||
//! let version_specifier = VersionSpecifier::from_str("== 1.*").unwrap();
|
||||
//! assert!(version_specifier.contains(&version));
|
||||
//! let version_specifiers = VersionSpecifiers::from_str(">=1.16, <2.0").unwrap();
|
||||
//! assert!(version_specifiers.contains(&version));
|
||||
//! ```
|
||||
//!
|
||||
//! PEP 440 has a lot of unintuitive features, including:
|
||||
//!
|
||||
//! * An epoch that you can prefix the version which, e.g. `1!1.2.3`. Lower epoch always means lower
|
||||
|
@ -47,3 +36,6 @@ pub use {
|
|||
|
||||
mod version;
|
||||
mod version_specifier;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
|
11
crates/uv-pep440/src/tests.rs
Normal file
11
crates/uv-pep440/src/tests.rs
Normal file
|
@ -0,0 +1,11 @@
|
|||
use super::{Version, VersionSpecifier, VersionSpecifiers};
|
||||
use std::str::FromStr;
|
||||
|
||||
#[test]
|
||||
fn test_version() {
|
||||
let version = Version::from_str("1.19").unwrap();
|
||||
let version_specifier = VersionSpecifier::from_str("== 1.*").unwrap();
|
||||
assert!(version_specifier.contains(&version));
|
||||
let version_specifiers = VersionSpecifiers::from_str(">=1.16, <2.0").unwrap();
|
||||
assert!(version_specifiers.contains(&version));
|
||||
}
|
File diff suppressed because it is too large
Load diff
1343
crates/uv-pep440/src/version/tests.rs
Normal file
1343
crates/uv-pep440/src/version/tests.rs
Normal file
File diff suppressed because it is too large
Load diff
|
@ -802,959 +802,4 @@ pub(crate) fn parse_version_specifiers(
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::{cmp::Ordering, str::FromStr};
|
||||
|
||||
use indoc::indoc;
|
||||
|
||||
use crate::LocalSegment;
|
||||
|
||||
use super::*;
|
||||
|
||||
/// <https://peps.python.org/pep-0440/#version-matching>
|
||||
#[test]
|
||||
fn test_equal() {
|
||||
let version = Version::from_str("1.1.post1").unwrap();
|
||||
|
||||
assert!(!VersionSpecifier::from_str("== 1.1")
|
||||
.unwrap()
|
||||
.contains(&version));
|
||||
assert!(VersionSpecifier::from_str("== 1.1.post1")
|
||||
.unwrap()
|
||||
.contains(&version));
|
||||
assert!(VersionSpecifier::from_str("== 1.1.*")
|
||||
.unwrap()
|
||||
.contains(&version));
|
||||
}
|
||||
|
||||
const VERSIONS_ALL: &[&str] = &[
|
||||
// Implicit epoch of 0
|
||||
"1.0.dev456",
|
||||
"1.0a1",
|
||||
"1.0a2.dev456",
|
||||
"1.0a12.dev456",
|
||||
"1.0a12",
|
||||
"1.0b1.dev456",
|
||||
"1.0b2",
|
||||
"1.0b2.post345.dev456",
|
||||
"1.0b2.post345",
|
||||
"1.0b2-346",
|
||||
"1.0c1.dev456",
|
||||
"1.0c1",
|
||||
"1.0rc2",
|
||||
"1.0c3",
|
||||
"1.0",
|
||||
"1.0.post456.dev34",
|
||||
"1.0.post456",
|
||||
"1.1.dev1",
|
||||
"1.2+123abc",
|
||||
"1.2+123abc456",
|
||||
"1.2+abc",
|
||||
"1.2+abc123",
|
||||
"1.2+abc123def",
|
||||
"1.2+1234.abc",
|
||||
"1.2+123456",
|
||||
"1.2.r32+123456",
|
||||
"1.2.rev33+123456",
|
||||
// Explicit epoch of 1
|
||||
"1!1.0.dev456",
|
||||
"1!1.0a1",
|
||||
"1!1.0a2.dev456",
|
||||
"1!1.0a12.dev456",
|
||||
"1!1.0a12",
|
||||
"1!1.0b1.dev456",
|
||||
"1!1.0b2",
|
||||
"1!1.0b2.post345.dev456",
|
||||
"1!1.0b2.post345",
|
||||
"1!1.0b2-346",
|
||||
"1!1.0c1.dev456",
|
||||
"1!1.0c1",
|
||||
"1!1.0rc2",
|
||||
"1!1.0c3",
|
||||
"1!1.0",
|
||||
"1!1.0.post456.dev34",
|
||||
"1!1.0.post456",
|
||||
"1!1.1.dev1",
|
||||
"1!1.2+123abc",
|
||||
"1!1.2+123abc456",
|
||||
"1!1.2+abc",
|
||||
"1!1.2+abc123",
|
||||
"1!1.2+abc123def",
|
||||
"1!1.2+1234.abc",
|
||||
"1!1.2+123456",
|
||||
"1!1.2.r32+123456",
|
||||
"1!1.2.rev33+123456",
|
||||
];
|
||||
|
||||
/// <https://github.com/pypa/packaging/blob/237ff3aa348486cf835a980592af3a59fccd6101/tests/test_version.py#L666-L707>
|
||||
/// <https://github.com/pypa/packaging/blob/237ff3aa348486cf835a980592af3a59fccd6101/tests/test_version.py#L709-L750>
|
||||
///
|
||||
/// These tests are a lot shorter than the pypa/packaging version since we implement all
|
||||
/// comparisons through one method
|
||||
#[test]
|
||||
fn test_operators_true() {
|
||||
let versions: Vec<Version> = VERSIONS_ALL
|
||||
.iter()
|
||||
.map(|version| Version::from_str(version).unwrap())
|
||||
.collect();
|
||||
|
||||
// Below we'll generate every possible combination of VERSIONS_ALL that
|
||||
// should be true for the given operator
|
||||
let operations = [
|
||||
// Verify that the less than (<) operator works correctly
|
||||
versions
|
||||
.iter()
|
||||
.enumerate()
|
||||
.flat_map(|(i, x)| {
|
||||
versions[i + 1..]
|
||||
.iter()
|
||||
.map(move |y| (x, y, Ordering::Less))
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
// Verify that the equal (==) operator works correctly
|
||||
versions
|
||||
.iter()
|
||||
.map(move |x| (x, x, Ordering::Equal))
|
||||
.collect::<Vec<_>>(),
|
||||
// Verify that the greater than (>) operator works correctly
|
||||
versions
|
||||
.iter()
|
||||
.enumerate()
|
||||
.flat_map(|(i, x)| versions[..i].iter().map(move |y| (x, y, Ordering::Greater)))
|
||||
.collect::<Vec<_>>(),
|
||||
]
|
||||
.into_iter()
|
||||
.flatten();
|
||||
|
||||
for (a, b, ordering) in operations {
|
||||
assert_eq!(a.cmp(b), ordering, "{a} {ordering:?} {b}");
|
||||
}
|
||||
}
|
||||
|
||||
const VERSIONS_0: &[&str] = &[
|
||||
"1.0.dev456",
|
||||
"1.0a1",
|
||||
"1.0a2.dev456",
|
||||
"1.0a12.dev456",
|
||||
"1.0a12",
|
||||
"1.0b1.dev456",
|
||||
"1.0b2",
|
||||
"1.0b2.post345.dev456",
|
||||
"1.0b2.post345",
|
||||
"1.0b2-346",
|
||||
"1.0c1.dev456",
|
||||
"1.0c1",
|
||||
"1.0rc2",
|
||||
"1.0c3",
|
||||
"1.0",
|
||||
"1.0.post456.dev34",
|
||||
"1.0.post456",
|
||||
"1.1.dev1",
|
||||
"1.2+123abc",
|
||||
"1.2+123abc456",
|
||||
"1.2+abc",
|
||||
"1.2+abc123",
|
||||
"1.2+abc123def",
|
||||
"1.2+1234.abc",
|
||||
"1.2+123456",
|
||||
"1.2.r32+123456",
|
||||
"1.2.rev33+123456",
|
||||
];
|
||||
|
||||
const SPECIFIERS_OTHER: &[&str] = &[
|
||||
"== 1.*", "== 1.0.*", "== 1.1.*", "== 1.2.*", "== 2.*", "~= 1.0", "~= 1.0b1", "~= 1.1",
|
||||
"~= 1.2", "~= 2.0",
|
||||
];
|
||||
|
||||
const EXPECTED_OTHER: &[[bool; 10]] = &[
|
||||
[
|
||||
true, true, false, false, false, false, false, false, false, false,
|
||||
],
|
||||
[
|
||||
true, true, false, false, false, false, false, false, false, false,
|
||||
],
|
||||
[
|
||||
true, true, false, false, false, false, false, false, false, false,
|
||||
],
|
||||
[
|
||||
true, true, false, false, false, false, false, false, false, false,
|
||||
],
|
||||
[
|
||||
true, true, false, false, false, false, false, false, false, false,
|
||||
],
|
||||
[
|
||||
true, true, false, false, false, false, false, false, false, false,
|
||||
],
|
||||
[
|
||||
true, true, false, false, false, false, true, false, false, false,
|
||||
],
|
||||
[
|
||||
true, true, false, false, false, false, true, false, false, false,
|
||||
],
|
||||
[
|
||||
true, true, false, false, false, false, true, false, false, false,
|
||||
],
|
||||
[
|
||||
true, true, false, false, false, false, true, false, false, false,
|
||||
],
|
||||
[
|
||||
true, true, false, false, false, false, true, false, false, false,
|
||||
],
|
||||
[
|
||||
true, true, false, false, false, false, true, false, false, false,
|
||||
],
|
||||
[
|
||||
true, true, false, false, false, false, true, false, false, false,
|
||||
],
|
||||
[
|
||||
true, true, false, false, false, false, true, false, false, false,
|
||||
],
|
||||
[
|
||||
true, true, false, false, false, true, true, false, false, false,
|
||||
],
|
||||
[
|
||||
true, true, false, false, false, true, true, false, false, false,
|
||||
],
|
||||
[
|
||||
true, true, false, false, false, true, true, false, false, false,
|
||||
],
|
||||
[
|
||||
true, false, true, false, false, true, true, false, false, false,
|
||||
],
|
||||
[
|
||||
true, false, false, true, false, true, true, true, true, false,
|
||||
],
|
||||
[
|
||||
true, false, false, true, false, true, true, true, true, false,
|
||||
],
|
||||
[
|
||||
true, false, false, true, false, true, true, true, true, false,
|
||||
],
|
||||
[
|
||||
true, false, false, true, false, true, true, true, true, false,
|
||||
],
|
||||
[
|
||||
true, false, false, true, false, true, true, true, true, false,
|
||||
],
|
||||
[
|
||||
true, false, false, true, false, true, true, true, true, false,
|
||||
],
|
||||
[
|
||||
true, false, false, true, false, true, true, true, true, false,
|
||||
],
|
||||
[
|
||||
true, false, false, true, false, true, true, true, true, false,
|
||||
],
|
||||
[
|
||||
true, false, false, true, false, true, true, true, true, false,
|
||||
],
|
||||
];
|
||||
|
||||
/// Test for tilde equal (~=) and star equal (== x.y.*) recorded from pypa/packaging
|
||||
///
|
||||
/// Well, except for <https://github.com/pypa/packaging/issues/617>
|
||||
#[test]
|
||||
fn test_operators_other() {
|
||||
let versions = VERSIONS_0
|
||||
.iter()
|
||||
.map(|version| Version::from_str(version).unwrap());
|
||||
let specifiers: Vec<_> = SPECIFIERS_OTHER
|
||||
.iter()
|
||||
.map(|specifier| VersionSpecifier::from_str(specifier).unwrap())
|
||||
.collect();
|
||||
|
||||
for (version, expected) in versions.zip(EXPECTED_OTHER) {
|
||||
let actual = specifiers
|
||||
.iter()
|
||||
.map(|specifier| specifier.contains(&version));
|
||||
for ((actual, expected), _specifier) in actual.zip(expected).zip(SPECIFIERS_OTHER) {
|
||||
assert_eq!(actual, *expected);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_arbitrary_equality() {
|
||||
assert!(VersionSpecifier::from_str("=== 1.2a1")
|
||||
.unwrap()
|
||||
.contains(&Version::from_str("1.2a1").unwrap()));
|
||||
assert!(!VersionSpecifier::from_str("=== 1.2a1")
|
||||
.unwrap()
|
||||
.contains(&Version::from_str("1.2a1+local").unwrap()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_specifiers_true() {
|
||||
let pairs = [
|
||||
// Test the equality operation
|
||||
("2.0", "==2"),
|
||||
("2.0", "==2.0"),
|
||||
("2.0", "==2.0.0"),
|
||||
("2.0+deadbeef", "==2"),
|
||||
("2.0+deadbeef", "==2.0"),
|
||||
("2.0+deadbeef", "==2.0.0"),
|
||||
("2.0+deadbeef", "==2+deadbeef"),
|
||||
("2.0+deadbeef", "==2.0+deadbeef"),
|
||||
("2.0+deadbeef", "==2.0.0+deadbeef"),
|
||||
("2.0+deadbeef.0", "==2.0.0+deadbeef.00"),
|
||||
// Test the equality operation with a prefix
|
||||
("2.dev1", "==2.*"),
|
||||
("2a1", "==2.*"),
|
||||
("2a1.post1", "==2.*"),
|
||||
("2b1", "==2.*"),
|
||||
("2b1.dev1", "==2.*"),
|
||||
("2c1", "==2.*"),
|
||||
("2c1.post1.dev1", "==2.*"),
|
||||
("2c1.post1.dev1", "==2.0.*"),
|
||||
("2rc1", "==2.*"),
|
||||
("2rc1", "==2.0.*"),
|
||||
("2", "==2.*"),
|
||||
("2", "==2.0.*"),
|
||||
("2", "==0!2.*"),
|
||||
("0!2", "==2.*"),
|
||||
("2.0", "==2.*"),
|
||||
("2.0.0", "==2.*"),
|
||||
("2.1+local.version", "==2.1.*"),
|
||||
// Test the in-equality operation
|
||||
("2.1", "!=2"),
|
||||
("2.1", "!=2.0"),
|
||||
("2.0.1", "!=2"),
|
||||
("2.0.1", "!=2.0"),
|
||||
("2.0.1", "!=2.0.0"),
|
||||
("2.0", "!=2.0+deadbeef"),
|
||||
// Test the in-equality operation with a prefix
|
||||
("2.0", "!=3.*"),
|
||||
("2.1", "!=2.0.*"),
|
||||
// Test the greater than equal operation
|
||||
("2.0", ">=2"),
|
||||
("2.0", ">=2.0"),
|
||||
("2.0", ">=2.0.0"),
|
||||
("2.0.post1", ">=2"),
|
||||
("2.0.post1.dev1", ">=2"),
|
||||
("3", ">=2"),
|
||||
// Test the less than equal operation
|
||||
("2.0", "<=2"),
|
||||
("2.0", "<=2.0"),
|
||||
("2.0", "<=2.0.0"),
|
||||
("2.0.dev1", "<=2"),
|
||||
("2.0a1", "<=2"),
|
||||
("2.0a1.dev1", "<=2"),
|
||||
("2.0b1", "<=2"),
|
||||
("2.0b1.post1", "<=2"),
|
||||
("2.0c1", "<=2"),
|
||||
("2.0c1.post1.dev1", "<=2"),
|
||||
("2.0rc1", "<=2"),
|
||||
("1", "<=2"),
|
||||
// Test the greater than operation
|
||||
("3", ">2"),
|
||||
("2.1", ">2.0"),
|
||||
("2.0.1", ">2"),
|
||||
("2.1.post1", ">2"),
|
||||
("2.1+local.version", ">2"),
|
||||
// Test the less than operation
|
||||
("1", "<2"),
|
||||
("2.0", "<2.1"),
|
||||
("2.0.dev0", "<2.1"),
|
||||
// Test the compatibility operation
|
||||
("1", "~=1.0"),
|
||||
("1.0.1", "~=1.0"),
|
||||
("1.1", "~=1.0"),
|
||||
("1.9999999", "~=1.0"),
|
||||
("1.1", "~=1.0a1"),
|
||||
("2022.01.01", "~=2022.01.01"),
|
||||
// Test that epochs are handled sanely
|
||||
("2!1.0", "~=2!1.0"),
|
||||
("2!1.0", "==2!1.*"),
|
||||
("2!1.0", "==2!1.0"),
|
||||
("2!1.0", "!=1.0"),
|
||||
("1.0", "!=2!1.0"),
|
||||
("1.0", "<=2!0.1"),
|
||||
("2!1.0", ">=2.0"),
|
||||
("1.0", "<2!0.1"),
|
||||
("2!1.0", ">2.0"),
|
||||
// Test some normalization rules
|
||||
("2.0.5", ">2.0dev"),
|
||||
];
|
||||
|
||||
for (s_version, s_spec) in pairs {
|
||||
let version = s_version.parse::<Version>().unwrap();
|
||||
let spec = s_spec.parse::<VersionSpecifier>().unwrap();
|
||||
assert!(
|
||||
spec.contains(&version),
|
||||
"{s_version} {s_spec}\nversion repr: {:?}\nspec version repr: {:?}",
|
||||
version.as_bloated_debug(),
|
||||
spec.version.as_bloated_debug(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_specifier_false() {
|
||||
let pairs = [
|
||||
// Test the equality operation
|
||||
("2.1", "==2"),
|
||||
("2.1", "==2.0"),
|
||||
("2.1", "==2.0.0"),
|
||||
("2.0", "==2.0+deadbeef"),
|
||||
// Test the equality operation with a prefix
|
||||
("2.0", "==3.*"),
|
||||
("2.1", "==2.0.*"),
|
||||
// Test the in-equality operation
|
||||
("2.0", "!=2"),
|
||||
("2.0", "!=2.0"),
|
||||
("2.0", "!=2.0.0"),
|
||||
("2.0+deadbeef", "!=2"),
|
||||
("2.0+deadbeef", "!=2.0"),
|
||||
("2.0+deadbeef", "!=2.0.0"),
|
||||
("2.0+deadbeef", "!=2+deadbeef"),
|
||||
("2.0+deadbeef", "!=2.0+deadbeef"),
|
||||
("2.0+deadbeef", "!=2.0.0+deadbeef"),
|
||||
("2.0+deadbeef.0", "!=2.0.0+deadbeef.00"),
|
||||
// Test the in-equality operation with a prefix
|
||||
("2.dev1", "!=2.*"),
|
||||
("2a1", "!=2.*"),
|
||||
("2a1.post1", "!=2.*"),
|
||||
("2b1", "!=2.*"),
|
||||
("2b1.dev1", "!=2.*"),
|
||||
("2c1", "!=2.*"),
|
||||
("2c1.post1.dev1", "!=2.*"),
|
||||
("2c1.post1.dev1", "!=2.0.*"),
|
||||
("2rc1", "!=2.*"),
|
||||
("2rc1", "!=2.0.*"),
|
||||
("2", "!=2.*"),
|
||||
("2", "!=2.0.*"),
|
||||
("2.0", "!=2.*"),
|
||||
("2.0.0", "!=2.*"),
|
||||
// Test the greater than equal operation
|
||||
("2.0.dev1", ">=2"),
|
||||
("2.0a1", ">=2"),
|
||||
("2.0a1.dev1", ">=2"),
|
||||
("2.0b1", ">=2"),
|
||||
("2.0b1.post1", ">=2"),
|
||||
("2.0c1", ">=2"),
|
||||
("2.0c1.post1.dev1", ">=2"),
|
||||
("2.0rc1", ">=2"),
|
||||
("1", ">=2"),
|
||||
// Test the less than equal operation
|
||||
("2.0.post1", "<=2"),
|
||||
("2.0.post1.dev1", "<=2"),
|
||||
("3", "<=2"),
|
||||
// Test the greater than operation
|
||||
("1", ">2"),
|
||||
("2.0.dev1", ">2"),
|
||||
("2.0a1", ">2"),
|
||||
("2.0a1.post1", ">2"),
|
||||
("2.0b1", ">2"),
|
||||
("2.0b1.dev1", ">2"),
|
||||
("2.0c1", ">2"),
|
||||
("2.0c1.post1.dev1", ">2"),
|
||||
("2.0rc1", ">2"),
|
||||
("2.0", ">2"),
|
||||
("2.0.post1", ">2"),
|
||||
("2.0.post1.dev1", ">2"),
|
||||
("2.0+local.version", ">2"),
|
||||
// Test the less than operation
|
||||
("2.0.dev1", "<2"),
|
||||
("2.0a1", "<2"),
|
||||
("2.0a1.post1", "<2"),
|
||||
("2.0b1", "<2"),
|
||||
("2.0b2.dev1", "<2"),
|
||||
("2.0c1", "<2"),
|
||||
("2.0c1.post1.dev1", "<2"),
|
||||
("2.0rc1", "<2"),
|
||||
("2.0", "<2"),
|
||||
("2.post1", "<2"),
|
||||
("2.post1.dev1", "<2"),
|
||||
("3", "<2"),
|
||||
// Test the compatibility operation
|
||||
("2.0", "~=1.0"),
|
||||
("1.1.0", "~=1.0.0"),
|
||||
("1.1.post1", "~=1.0.0"),
|
||||
// Test that epochs are handled sanely
|
||||
("1.0", "~=2!1.0"),
|
||||
("2!1.0", "~=1.0"),
|
||||
("2!1.0", "==1.0"),
|
||||
("1.0", "==2!1.0"),
|
||||
("2!1.0", "==1.*"),
|
||||
("1.0", "==2!1.*"),
|
||||
("2!1.0", "!=2!1.0"),
|
||||
];
|
||||
for (version, specifier) in pairs {
|
||||
assert!(
|
||||
!VersionSpecifier::from_str(specifier)
|
||||
.unwrap()
|
||||
.contains(&Version::from_str(version).unwrap()),
|
||||
"{version} {specifier}"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_version_specifiers() {
|
||||
let result = VersionSpecifiers::from_str("~= 0.9, >= 1.0, != 1.3.4.*, < 2.0").unwrap();
|
||||
assert_eq!(
|
||||
result.0,
|
||||
[
|
||||
VersionSpecifier {
|
||||
operator: Operator::TildeEqual,
|
||||
version: Version::new([0, 9]),
|
||||
},
|
||||
VersionSpecifier {
|
||||
operator: Operator::GreaterThanEqual,
|
||||
version: Version::new([1, 0]),
|
||||
},
|
||||
VersionSpecifier {
|
||||
operator: Operator::NotEqualStar,
|
||||
version: Version::new([1, 3, 4]),
|
||||
},
|
||||
VersionSpecifier {
|
||||
operator: Operator::LessThan,
|
||||
version: Version::new([2, 0]),
|
||||
}
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_error() {
|
||||
let result = VersionSpecifiers::from_str("~= 0.9, %= 1.0, != 1.3.4.*");
|
||||
assert_eq!(
|
||||
result.unwrap_err().to_string(),
|
||||
indoc! {r"
|
||||
Failed to parse version: Unexpected end of version specifier, expected operator:
|
||||
~= 0.9, %= 1.0, != 1.3.4.*
|
||||
^^^^^^^
|
||||
"}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_non_star_after_star() {
|
||||
let result = VersionSpecifiers::from_str("== 0.9.*.1");
|
||||
assert_eq!(
|
||||
result.unwrap_err().inner.err,
|
||||
ParseErrorKind::InvalidVersion(version::PatternErrorKind::WildcardNotTrailing.into())
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_star_wrong_operator() {
|
||||
let result = VersionSpecifiers::from_str(">= 0.9.1.*");
|
||||
assert_eq!(
|
||||
result.unwrap_err().inner.err,
|
||||
ParseErrorKind::InvalidSpecifier(
|
||||
BuildErrorKind::OperatorWithStar {
|
||||
operator: Operator::GreaterThanEqual,
|
||||
}
|
||||
.into()
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_invalid_word() {
|
||||
let result = VersionSpecifiers::from_str("blergh");
|
||||
assert_eq!(
|
||||
result.unwrap_err().inner.err,
|
||||
ParseErrorKind::MissingOperator.into(),
|
||||
);
|
||||
}
|
||||
|
||||
/// <https://github.com/pypa/packaging/blob/e184feef1a28a5c574ec41f5c263a3a573861f5a/tests/test_specifiers.py#L44-L84>
|
||||
#[test]
|
||||
fn test_invalid_specifier() {
|
||||
let specifiers = [
|
||||
// Operator-less specifier
|
||||
("2.0", ParseErrorKind::MissingOperator.into()),
|
||||
// Invalid operator
|
||||
(
|
||||
"=>2.0",
|
||||
ParseErrorKind::InvalidOperator(OperatorParseError {
|
||||
got: "=>".to_string(),
|
||||
})
|
||||
.into(),
|
||||
),
|
||||
// Version-less specifier
|
||||
("==", ParseErrorKind::MissingVersion.into()),
|
||||
// Local segment on operators which don't support them
|
||||
(
|
||||
"~=1.0+5",
|
||||
ParseErrorKind::InvalidSpecifier(
|
||||
BuildErrorKind::OperatorLocalCombo {
|
||||
operator: Operator::TildeEqual,
|
||||
version: Version::new([1, 0]).with_local(vec![LocalSegment::Number(5)]),
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
(
|
||||
">=1.0+deadbeef",
|
||||
ParseErrorKind::InvalidSpecifier(
|
||||
BuildErrorKind::OperatorLocalCombo {
|
||||
operator: Operator::GreaterThanEqual,
|
||||
version: Version::new([1, 0])
|
||||
.with_local(vec![LocalSegment::String("deadbeef".to_string())]),
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
(
|
||||
"<=1.0+abc123",
|
||||
ParseErrorKind::InvalidSpecifier(
|
||||
BuildErrorKind::OperatorLocalCombo {
|
||||
operator: Operator::LessThanEqual,
|
||||
version: Version::new([1, 0])
|
||||
.with_local(vec![LocalSegment::String("abc123".to_string())]),
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
(
|
||||
">1.0+watwat",
|
||||
ParseErrorKind::InvalidSpecifier(
|
||||
BuildErrorKind::OperatorLocalCombo {
|
||||
operator: Operator::GreaterThan,
|
||||
version: Version::new([1, 0])
|
||||
.with_local(vec![LocalSegment::String("watwat".to_string())]),
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
(
|
||||
"<1.0+1.0",
|
||||
ParseErrorKind::InvalidSpecifier(
|
||||
BuildErrorKind::OperatorLocalCombo {
|
||||
operator: Operator::LessThan,
|
||||
version: Version::new([1, 0])
|
||||
.with_local(vec![LocalSegment::Number(1), LocalSegment::Number(0)]),
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
// Prefix matching on operators which don't support them
|
||||
(
|
||||
"~=1.0.*",
|
||||
ParseErrorKind::InvalidSpecifier(
|
||||
BuildErrorKind::OperatorWithStar {
|
||||
operator: Operator::TildeEqual,
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
(
|
||||
">=1.0.*",
|
||||
ParseErrorKind::InvalidSpecifier(
|
||||
BuildErrorKind::OperatorWithStar {
|
||||
operator: Operator::GreaterThanEqual,
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
(
|
||||
"<=1.0.*",
|
||||
ParseErrorKind::InvalidSpecifier(
|
||||
BuildErrorKind::OperatorWithStar {
|
||||
operator: Operator::LessThanEqual,
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
(
|
||||
">1.0.*",
|
||||
ParseErrorKind::InvalidSpecifier(
|
||||
BuildErrorKind::OperatorWithStar {
|
||||
operator: Operator::GreaterThan,
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
(
|
||||
"<1.0.*",
|
||||
ParseErrorKind::InvalidSpecifier(
|
||||
BuildErrorKind::OperatorWithStar {
|
||||
operator: Operator::LessThan,
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
// Combination of local and prefix matching on operators which do
|
||||
// support one or the other
|
||||
(
|
||||
"==1.0.*+5",
|
||||
ParseErrorKind::InvalidVersion(
|
||||
version::PatternErrorKind::WildcardNotTrailing.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
(
|
||||
"!=1.0.*+deadbeef",
|
||||
ParseErrorKind::InvalidVersion(
|
||||
version::PatternErrorKind::WildcardNotTrailing.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
// Prefix matching cannot be used with a pre-release, post-release,
|
||||
// dev or local version
|
||||
(
|
||||
"==2.0a1.*",
|
||||
ParseErrorKind::InvalidVersion(
|
||||
version::ErrorKind::UnexpectedEnd {
|
||||
version: "2.0a1".to_string(),
|
||||
remaining: ".*".to_string(),
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
(
|
||||
"!=2.0a1.*",
|
||||
ParseErrorKind::InvalidVersion(
|
||||
version::ErrorKind::UnexpectedEnd {
|
||||
version: "2.0a1".to_string(),
|
||||
remaining: ".*".to_string(),
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
(
|
||||
"==2.0.post1.*",
|
||||
ParseErrorKind::InvalidVersion(
|
||||
version::ErrorKind::UnexpectedEnd {
|
||||
version: "2.0.post1".to_string(),
|
||||
remaining: ".*".to_string(),
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
(
|
||||
"!=2.0.post1.*",
|
||||
ParseErrorKind::InvalidVersion(
|
||||
version::ErrorKind::UnexpectedEnd {
|
||||
version: "2.0.post1".to_string(),
|
||||
remaining: ".*".to_string(),
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
(
|
||||
"==2.0.dev1.*",
|
||||
ParseErrorKind::InvalidVersion(
|
||||
version::ErrorKind::UnexpectedEnd {
|
||||
version: "2.0.dev1".to_string(),
|
||||
remaining: ".*".to_string(),
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
(
|
||||
"!=2.0.dev1.*",
|
||||
ParseErrorKind::InvalidVersion(
|
||||
version::ErrorKind::UnexpectedEnd {
|
||||
version: "2.0.dev1".to_string(),
|
||||
remaining: ".*".to_string(),
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
(
|
||||
"==1.0+5.*",
|
||||
ParseErrorKind::InvalidVersion(
|
||||
version::ErrorKind::LocalEmpty { precursor: '.' }.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
(
|
||||
"!=1.0+deadbeef.*",
|
||||
ParseErrorKind::InvalidVersion(
|
||||
version::ErrorKind::LocalEmpty { precursor: '.' }.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
// Prefix matching must appear at the end
|
||||
(
|
||||
"==1.0.*.5",
|
||||
ParseErrorKind::InvalidVersion(
|
||||
version::PatternErrorKind::WildcardNotTrailing.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
// Compatible operator requires 2 digits in the release operator
|
||||
(
|
||||
"~=1",
|
||||
ParseErrorKind::InvalidSpecifier(BuildErrorKind::CompatibleRelease.into()).into(),
|
||||
),
|
||||
// Cannot use a prefix matching after a .devN version
|
||||
(
|
||||
"==1.0.dev1.*",
|
||||
ParseErrorKind::InvalidVersion(
|
||||
version::ErrorKind::UnexpectedEnd {
|
||||
version: "1.0.dev1".to_string(),
|
||||
remaining: ".*".to_string(),
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
(
|
||||
"!=1.0.dev1.*",
|
||||
ParseErrorKind::InvalidVersion(
|
||||
version::ErrorKind::UnexpectedEnd {
|
||||
version: "1.0.dev1".to_string(),
|
||||
remaining: ".*".to_string(),
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
];
|
||||
for (specifier, error) in specifiers {
|
||||
assert_eq!(VersionSpecifier::from_str(specifier).unwrap_err(), error);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
fn test_display_start() {
    // Parsing normalizes away the whitespace after the operator, so the
    // round-tripped display has no space between operator and version.
    for (input, expected) in [("== 1.1.*", "==1.1.*"), ("!= 1.1.*", "!=1.1.*")] {
        let spec = VersionSpecifier::from_str(input).unwrap();
        assert_eq!(spec.to_string(), expected);
    }
}
|
||||
|
||||
#[test]
fn test_version_specifiers_str() {
    // Display normalizes whitespace and orders the individual specifiers.
    let single = VersionSpecifiers::from_str(">= 3.7").unwrap();
    assert_eq!(single.to_string(), ">=3.7");

    let multiple = VersionSpecifiers::from_str(">=3.7, < 4.0, != 3.9.0").unwrap();
    assert_eq!(multiple.to_string(), ">=3.7, !=3.9.0, <4.0");
}
|
||||
|
||||
/// These occur in the simple api, e.g.
/// <https://pypi.org/simple/geopandas/?format=application/vnd.pypi.simple.v1+json>
#[test]
fn test_version_specifiers_empty() {
    // An empty string parses to an empty specifier set and displays as empty.
    let empty = VersionSpecifiers::from_str("").unwrap();
    assert_eq!(empty.to_string(), "");
}
|
||||
|
||||
/// All non-ASCII version specifiers are invalid, but the user can still
/// attempt to parse a non-ASCII string as a version specifier. This
/// ensures no panics occur and that the error reported has correct info.
#[test]
fn non_ascii_version_specifier() {
    // A single multi-byte codepoint: the reported span covers all 4 bytes.
    let err = "💩".parse::<VersionSpecifiers>().unwrap_err();
    assert_eq!((err.inner.start, err.inner.end), (0, 4));

    // Plain ASCII baseline: the error starts at byte 12, the start of `>5.%`.
    let err = ">=3.7, <4.0,>5.%".parse::<VersionSpecifiers>().unwrap_err();
    assert_eq!((err.inner.start, err.inner.end), (12, 16));

    // Replace one ASCII comma-space with U+3000 IDEOGRAPHIC SPACE (3 bytes,
    // visual width 2). The offsets used to come out wrong here; the bug was
    // fixed by switching the error reporting to byte offsets.
    let err = ">=3.7,\u{3000}<4.0,>5.%"
        .parse::<VersionSpecifiers>()
        .unwrap_err();
    assert_eq!((err.inner.start, err.inner.end), (14, 18));
}
|
||||
|
||||
/// Tests the human readable error messages generated from an invalid
/// sequence of version specifiers.
#[test]
fn error_message_version_specifiers_parse_error() {
    let specs = ">=1.2.3, 5.4.3, >=3.4.5";
    // Hand-build the expected error: a MissingOperator over the `5.4.3,`
    // span (bytes 8..14) of the input line.
    let expected = VersionSpecifiersParseError {
        inner: Box::new(VersionSpecifiersParseErrorInner {
            err: VersionSpecifierParseError {
                kind: Box::new(ParseErrorKind::MissingOperator),
            },
            line: specs.to_string(),
            start: 8,
            end: 14,
        }),
    };
    assert_eq!(expected, VersionSpecifiers::from_str(specs).unwrap_err());
    assert_eq!(
        expected.to_string(),
        "\
Failed to parse version: Unexpected end of version specifier, expected operator:
>=1.2.3, 5.4.3, >=3.4.5
        ^^^^^^
"
    );
}
|
||||
|
||||
/// Tests the human readable error messages generated when building an
/// invalid version specifier.
#[test]
fn error_message_version_specifier_build_error() {
    // `~= 5` is rejected because `~=` needs at least two release segments.
    let expected = VersionSpecifierBuildError {
        kind: Box::new(BuildErrorKind::CompatibleRelease),
    };
    let pattern = VersionPattern::verbatim(Version::new([5]));
    let actual = VersionSpecifier::from_pattern(Operator::TildeEqual, pattern).unwrap_err();
    assert_eq!(expected, actual);
    assert_eq!(
        actual.to_string(),
        "The ~= operator requires at least two segments in the release version"
    );
}
|
||||
|
||||
/// Tests the human readable error messages generated from parsing invalid
/// version specifier.
#[test]
fn error_message_version_specifier_parse_error() {
    // The build error for `~=` with a single segment surfaces through parsing.
    let expected = VersionSpecifierParseError {
        kind: Box::new(ParseErrorKind::InvalidSpecifier(
            VersionSpecifierBuildError {
                kind: Box::new(BuildErrorKind::CompatibleRelease),
            },
        )),
    };
    let actual = VersionSpecifier::from_str("~=5").unwrap_err();
    assert_eq!(expected, actual);
    assert_eq!(
        actual.to_string(),
        "The ~= operator requires at least two segments in the release version"
    );
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
948
crates/uv-pep440/src/version_specifier/tests.rs
Normal file
948
crates/uv-pep440/src/version_specifier/tests.rs
Normal file
|
@ -0,0 +1,948 @@
|
|||
use std::{cmp::Ordering, str::FromStr};
|
||||
|
||||
use indoc::indoc;
|
||||
|
||||
use crate::LocalSegment;
|
||||
|
||||
use super::*;
|
||||
|
||||
/// <https://peps.python.org/pep-0440/#version-matching>
#[test]
fn test_equal() {
    let version = Version::from_str("1.1.post1").unwrap();

    // (specifier, whether it should contain 1.1.post1)
    let cases = [
        ("== 1.1", false),
        ("== 1.1.post1", true),
        ("== 1.1.*", true),
    ];
    for (input, expected) in cases {
        let specifier = VersionSpecifier::from_str(input).unwrap();
        assert_eq!(specifier.contains(&version), expected, "{input}");
    }
}
|
||||
|
||||
/// A sample of valid PEP 440 version strings. `test_operators_true` derives the
/// expected `Ordering` for every pair from list position, so this list must be
/// kept in strictly ascending version order with no duplicates.
// NOTE(review): `1.0b2-346` presumably normalizes to `1.0b2.post346`
// (implicit post-release spelling) — confirm against the version parser.
const VERSIONS_ALL: &[&str] = &[
    // Implicit epoch of 0
    "1.0.dev456",
    "1.0a1",
    "1.0a2.dev456",
    "1.0a12.dev456",
    "1.0a12",
    "1.0b1.dev456",
    "1.0b2",
    "1.0b2.post345.dev456",
    "1.0b2.post345",
    "1.0b2-346",
    "1.0c1.dev456",
    "1.0c1",
    "1.0rc2",
    "1.0c3",
    "1.0",
    "1.0.post456.dev34",
    "1.0.post456",
    "1.1.dev1",
    "1.2+123abc",
    "1.2+123abc456",
    "1.2+abc",
    "1.2+abc123",
    "1.2+abc123def",
    "1.2+1234.abc",
    "1.2+123456",
    "1.2.r32+123456",
    "1.2.rev33+123456",
    // Explicit epoch of 1
    "1!1.0.dev456",
    "1!1.0a1",
    "1!1.0a2.dev456",
    "1!1.0a12.dev456",
    "1!1.0a12",
    "1!1.0b1.dev456",
    "1!1.0b2",
    "1!1.0b2.post345.dev456",
    "1!1.0b2.post345",
    "1!1.0b2-346",
    "1!1.0c1.dev456",
    "1!1.0c1",
    "1!1.0rc2",
    "1!1.0c3",
    "1!1.0",
    "1!1.0.post456.dev34",
    "1!1.0.post456",
    "1!1.1.dev1",
    "1!1.2+123abc",
    "1!1.2+123abc456",
    "1!1.2+abc",
    "1!1.2+abc123",
    "1!1.2+abc123def",
    "1!1.2+1234.abc",
    "1!1.2+123456",
    "1!1.2.r32+123456",
    "1!1.2.rev33+123456",
];
|
||||
|
||||
/// <https://github.com/pypa/packaging/blob/237ff3aa348486cf835a980592af3a59fccd6101/tests/test_version.py#L666-L707>
/// <https://github.com/pypa/packaging/blob/237ff3aa348486cf835a980592af3a59fccd6101/tests/test_version.py#L709-L750>
///
/// These tests are a lot shorter than the pypa/packaging version since we implement all
/// comparisons through one method
#[test]
fn test_operators_true() {
    let versions: Vec<Version> = VERSIONS_ALL
        .iter()
        .map(|version| Version::from_str(version).unwrap())
        .collect();

    // VERSIONS_ALL is strictly ascending, so for every pair the expected
    // ordering is exactly the ordering of their indices: earlier entries
    // compare `Less`, the same entry compares `Equal`, later ones `Greater`.
    for (i, a) in versions.iter().enumerate() {
        for (j, b) in versions.iter().enumerate() {
            let expected = i.cmp(&j);
            assert_eq!(a.cmp(b), expected, "{a} {expected:?} {b}");
        }
    }
}
|
||||
|
||||
/// The epoch-0 subset of the sample versions, row-aligned with
/// [`EXPECTED_OTHER`]: entry *i* here corresponds to expectation row *i*.
const VERSIONS_0: &[&str] = &[
    "1.0.dev456",
    "1.0a1",
    "1.0a2.dev456",
    "1.0a12.dev456",
    "1.0a12",
    "1.0b1.dev456",
    "1.0b2",
    "1.0b2.post345.dev456",
    "1.0b2.post345",
    "1.0b2-346",
    "1.0c1.dev456",
    "1.0c1",
    "1.0rc2",
    "1.0c3",
    "1.0",
    "1.0.post456.dev34",
    "1.0.post456",
    "1.1.dev1",
    "1.2+123abc",
    "1.2+123abc456",
    "1.2+abc",
    "1.2+abc123",
    "1.2+abc123def",
    "1.2+1234.abc",
    "1.2+123456",
    "1.2.r32+123456",
    "1.2.rev33+123456",
];
|
||||
|
||||
/// Specifiers exercising compatible-release (`~=`) and prefix (`== X.Y.*`)
/// matching; column-aligned with the entries of each [`EXPECTED_OTHER`] row.
const SPECIFIERS_OTHER: &[&str] = &[
    "== 1.*", "== 1.0.*", "== 1.1.*", "== 1.2.*", "== 2.*", "~= 1.0", "~= 1.0b1", "~= 1.1",
    "~= 1.2", "~= 2.0",
];
|
||||
|
||||
/// Expectation matrix for `test_operators_other`: row *i* states, for version
/// `VERSIONS_0[i]`, whether each specifier in [`SPECIFIERS_OTHER`] (in column
/// order) contains that version.
const EXPECTED_OTHER: &[[bool; 10]] = &[
    [
        true, true, false, false, false, false, false, false, false, false,
    ],
    [
        true, true, false, false, false, false, false, false, false, false,
    ],
    [
        true, true, false, false, false, false, false, false, false, false,
    ],
    [
        true, true, false, false, false, false, false, false, false, false,
    ],
    [
        true, true, false, false, false, false, false, false, false, false,
    ],
    [
        true, true, false, false, false, false, false, false, false, false,
    ],
    [
        true, true, false, false, false, false, true, false, false, false,
    ],
    [
        true, true, false, false, false, false, true, false, false, false,
    ],
    [
        true, true, false, false, false, false, true, false, false, false,
    ],
    [
        true, true, false, false, false, false, true, false, false, false,
    ],
    [
        true, true, false, false, false, false, true, false, false, false,
    ],
    [
        true, true, false, false, false, false, true, false, false, false,
    ],
    [
        true, true, false, false, false, false, true, false, false, false,
    ],
    [
        true, true, false, false, false, false, true, false, false, false,
    ],
    [
        true, true, false, false, false, true, true, false, false, false,
    ],
    [
        true, true, false, false, false, true, true, false, false, false,
    ],
    [
        true, true, false, false, false, true, true, false, false, false,
    ],
    [
        true, false, true, false, false, true, true, false, false, false,
    ],
    [
        true, false, false, true, false, true, true, true, true, false,
    ],
    [
        true, false, false, true, false, true, true, true, true, false,
    ],
    [
        true, false, false, true, false, true, true, true, true, false,
    ],
    [
        true, false, false, true, false, true, true, true, true, false,
    ],
    [
        true, false, false, true, false, true, true, true, true, false,
    ],
    [
        true, false, false, true, false, true, true, true, true, false,
    ],
    [
        true, false, false, true, false, true, true, true, true, false,
    ],
    [
        true, false, false, true, false, true, true, true, true, false,
    ],
    [
        true, false, false, true, false, true, true, true, true, false,
    ],
];
|
||||
|
||||
/// Test for tilde equal (~=) and star equal (== x.y.*) recorded from pypa/packaging
///
/// Well, except for <https://github.com/pypa/packaging/issues/617>
#[test]
fn test_operators_other() {
    let specifiers: Vec<VersionSpecifier> = SPECIFIERS_OTHER
        .iter()
        .map(|specifier| VersionSpecifier::from_str(specifier).unwrap())
        .collect();

    // Each version row in EXPECTED_OTHER holds one expected bool per
    // specifier, in the same column order as SPECIFIERS_OTHER.
    for (version, row) in VERSIONS_0.iter().zip(EXPECTED_OTHER) {
        let version = Version::from_str(version).unwrap();
        for (specifier, expected) in specifiers.iter().zip(row) {
            assert_eq!(specifier.contains(&version), *expected);
        }
    }
}
|
||||
|
||||
#[test]
fn test_arbitrary_equality() {
    // `===` is arbitrary equality: the local segment is significant, so a
    // version with an extra local part does not match.
    let spec = VersionSpecifier::from_str("=== 1.2a1").unwrap();
    assert!(spec.contains(&Version::from_str("1.2a1").unwrap()));
    assert!(!spec.contains(&Version::from_str("1.2a1+local").unwrap()));
}
|
||||
|
||||
/// Pairs of `(version, specifier)` where the specifier must contain the
/// version (`contains` returns `true`), grouped by operator.
#[test]
fn test_specifiers_true() {
    let pairs = [
        // Test the equality operation
        ("2.0", "==2"),
        ("2.0", "==2.0"),
        ("2.0", "==2.0.0"),
        ("2.0+deadbeef", "==2"),
        ("2.0+deadbeef", "==2.0"),
        ("2.0+deadbeef", "==2.0.0"),
        ("2.0+deadbeef", "==2+deadbeef"),
        ("2.0+deadbeef", "==2.0+deadbeef"),
        ("2.0+deadbeef", "==2.0.0+deadbeef"),
        ("2.0+deadbeef.0", "==2.0.0+deadbeef.00"),
        // Test the equality operation with a prefix
        ("2.dev1", "==2.*"),
        ("2a1", "==2.*"),
        ("2a1.post1", "==2.*"),
        ("2b1", "==2.*"),
        ("2b1.dev1", "==2.*"),
        ("2c1", "==2.*"),
        ("2c1.post1.dev1", "==2.*"),
        ("2c1.post1.dev1", "==2.0.*"),
        ("2rc1", "==2.*"),
        ("2rc1", "==2.0.*"),
        ("2", "==2.*"),
        ("2", "==2.0.*"),
        ("2", "==0!2.*"),
        ("0!2", "==2.*"),
        ("2.0", "==2.*"),
        ("2.0.0", "==2.*"),
        ("2.1+local.version", "==2.1.*"),
        // Test the in-equality operation
        ("2.1", "!=2"),
        ("2.1", "!=2.0"),
        ("2.0.1", "!=2"),
        ("2.0.1", "!=2.0"),
        ("2.0.1", "!=2.0.0"),
        ("2.0", "!=2.0+deadbeef"),
        // Test the in-equality operation with a prefix
        ("2.0", "!=3.*"),
        ("2.1", "!=2.0.*"),
        // Test the greater than equal operation
        ("2.0", ">=2"),
        ("2.0", ">=2.0"),
        ("2.0", ">=2.0.0"),
        ("2.0.post1", ">=2"),
        ("2.0.post1.dev1", ">=2"),
        ("3", ">=2"),
        // Test the less than equal operation
        ("2.0", "<=2"),
        ("2.0", "<=2.0"),
        ("2.0", "<=2.0.0"),
        ("2.0.dev1", "<=2"),
        ("2.0a1", "<=2"),
        ("2.0a1.dev1", "<=2"),
        ("2.0b1", "<=2"),
        ("2.0b1.post1", "<=2"),
        ("2.0c1", "<=2"),
        ("2.0c1.post1.dev1", "<=2"),
        ("2.0rc1", "<=2"),
        ("1", "<=2"),
        // Test the greater than operation
        ("3", ">2"),
        ("2.1", ">2.0"),
        ("2.0.1", ">2"),
        ("2.1.post1", ">2"),
        ("2.1+local.version", ">2"),
        // Test the less than operation
        ("1", "<2"),
        ("2.0", "<2.1"),
        ("2.0.dev0", "<2.1"),
        // Test the compatibility operation
        ("1", "~=1.0"),
        ("1.0.1", "~=1.0"),
        ("1.1", "~=1.0"),
        ("1.9999999", "~=1.0"),
        ("1.1", "~=1.0a1"),
        ("2022.01.01", "~=2022.01.01"),
        // Test that epochs are handled sanely
        ("2!1.0", "~=2!1.0"),
        ("2!1.0", "==2!1.*"),
        ("2!1.0", "==2!1.0"),
        ("2!1.0", "!=1.0"),
        ("1.0", "!=2!1.0"),
        ("1.0", "<=2!0.1"),
        ("2!1.0", ">=2.0"),
        ("1.0", "<2!0.1"),
        ("2!1.0", ">2.0"),
        // Test some normalization rules
        ("2.0.5", ">2.0dev"),
    ];

    for (s_version, s_spec) in pairs {
        let version = s_version.parse::<Version>().unwrap();
        let spec = s_spec.parse::<VersionSpecifier>().unwrap();
        // On failure, dump the expanded debug form of both versions so that
        // normalization differences (post/dev/local parts) are visible.
        assert!(
            spec.contains(&version),
            "{s_version} {s_spec}\nversion repr: {:?}\nspec version repr: {:?}",
            version.as_bloated_debug(),
            spec.version.as_bloated_debug(),
        );
    }
}
|
||||
|
||||
/// Pairs of `(version, specifier)` where the specifier must NOT contain the
/// version (`contains` returns `false`), grouped by operator.
#[test]
fn test_specifier_false() {
    let pairs = [
        // Test the equality operation
        ("2.1", "==2"),
        ("2.1", "==2.0"),
        ("2.1", "==2.0.0"),
        ("2.0", "==2.0+deadbeef"),
        // Test the equality operation with a prefix
        ("2.0", "==3.*"),
        ("2.1", "==2.0.*"),
        // Test the in-equality operation
        ("2.0", "!=2"),
        ("2.0", "!=2.0"),
        ("2.0", "!=2.0.0"),
        ("2.0+deadbeef", "!=2"),
        ("2.0+deadbeef", "!=2.0"),
        ("2.0+deadbeef", "!=2.0.0"),
        ("2.0+deadbeef", "!=2+deadbeef"),
        ("2.0+deadbeef", "!=2.0+deadbeef"),
        ("2.0+deadbeef", "!=2.0.0+deadbeef"),
        ("2.0+deadbeef.0", "!=2.0.0+deadbeef.00"),
        // Test the in-equality operation with a prefix
        ("2.dev1", "!=2.*"),
        ("2a1", "!=2.*"),
        ("2a1.post1", "!=2.*"),
        ("2b1", "!=2.*"),
        ("2b1.dev1", "!=2.*"),
        ("2c1", "!=2.*"),
        ("2c1.post1.dev1", "!=2.*"),
        ("2c1.post1.dev1", "!=2.0.*"),
        ("2rc1", "!=2.*"),
        ("2rc1", "!=2.0.*"),
        ("2", "!=2.*"),
        ("2", "!=2.0.*"),
        ("2.0", "!=2.*"),
        ("2.0.0", "!=2.*"),
        // Test the greater than equal operation
        ("2.0.dev1", ">=2"),
        ("2.0a1", ">=2"),
        ("2.0a1.dev1", ">=2"),
        ("2.0b1", ">=2"),
        ("2.0b1.post1", ">=2"),
        ("2.0c1", ">=2"),
        ("2.0c1.post1.dev1", ">=2"),
        ("2.0rc1", ">=2"),
        ("1", ">=2"),
        // Test the less than equal operation
        ("2.0.post1", "<=2"),
        ("2.0.post1.dev1", "<=2"),
        ("3", "<=2"),
        // Test the greater than operation
        ("1", ">2"),
        ("2.0.dev1", ">2"),
        ("2.0a1", ">2"),
        ("2.0a1.post1", ">2"),
        ("2.0b1", ">2"),
        ("2.0b1.dev1", ">2"),
        ("2.0c1", ">2"),
        ("2.0c1.post1.dev1", ">2"),
        ("2.0rc1", ">2"),
        ("2.0", ">2"),
        ("2.0.post1", ">2"),
        ("2.0.post1.dev1", ">2"),
        ("2.0+local.version", ">2"),
        // Test the less than operation
        ("2.0.dev1", "<2"),
        ("2.0a1", "<2"),
        ("2.0a1.post1", "<2"),
        ("2.0b1", "<2"),
        ("2.0b2.dev1", "<2"),
        ("2.0c1", "<2"),
        ("2.0c1.post1.dev1", "<2"),
        ("2.0rc1", "<2"),
        ("2.0", "<2"),
        ("2.post1", "<2"),
        ("2.post1.dev1", "<2"),
        ("3", "<2"),
        // Test the compatibility operation
        ("2.0", "~=1.0"),
        ("1.1.0", "~=1.0.0"),
        ("1.1.post1", "~=1.0.0"),
        // Test that epochs are handled sanely
        ("1.0", "~=2!1.0"),
        ("2!1.0", "~=1.0"),
        ("2!1.0", "==1.0"),
        ("1.0", "==2!1.0"),
        ("2!1.0", "==1.*"),
        ("1.0", "==2!1.*"),
        ("2!1.0", "!=2!1.0"),
    ];
    for (version, specifier) in pairs {
        assert!(
            !VersionSpecifier::from_str(specifier)
                .unwrap()
                .contains(&Version::from_str(version).unwrap()),
            "{version} {specifier}"
        );
    }
}
|
||||
|
||||
#[test]
fn test_parse_version_specifiers() {
    // Parsing keeps the specifiers in source order; each entry pairs an
    // operator with its (verbatim) version.
    let result = VersionSpecifiers::from_str("~= 0.9, >= 1.0, != 1.3.4.*, < 2.0").unwrap();
    let expected = [
        (Operator::TildeEqual, Version::new([0, 9])),
        (Operator::GreaterThanEqual, Version::new([1, 0])),
        (Operator::NotEqualStar, Version::new([1, 3, 4])),
        (Operator::LessThan, Version::new([2, 0])),
    ]
    .map(|(operator, version)| VersionSpecifier { operator, version });
    assert_eq!(result.0, expected);
}
|
||||
|
||||
#[test]
fn test_parse_error() {
    // `%=` is not an operator; the caret span in the rendered error must
    // point at the offending `%= 1.0,` segment.
    let err = VersionSpecifiers::from_str("~= 0.9, %= 1.0, != 1.3.4.*").unwrap_err();
    assert_eq!(
        err.to_string(),
        indoc! {r"
            Failed to parse version: Unexpected end of version specifier, expected operator:
            ~= 0.9, %= 1.0, != 1.3.4.*
                    ^^^^^^^
        "}
    );
}
|
||||
|
||||
#[test]
fn test_non_star_after_star() {
    // A wildcard is only valid as the final segment of the version.
    let err = VersionSpecifiers::from_str("== 0.9.*.1").unwrap_err();
    assert_eq!(
        err.inner.err,
        ParseErrorKind::InvalidVersion(version::PatternErrorKind::WildcardNotTrailing.into())
            .into(),
    );
}
|
||||
|
||||
#[test]
fn test_star_wrong_operator() {
    // `>=` does not support prefix (wildcard) matching.
    let err = VersionSpecifiers::from_str(">= 0.9.1.*").unwrap_err();
    let kind = BuildErrorKind::OperatorWithStar {
        operator: Operator::GreaterThanEqual,
    };
    assert_eq!(
        err.inner.err,
        ParseErrorKind::InvalidSpecifier(kind.into()).into(),
    );
}
|
||||
|
||||
#[test]
fn test_invalid_word() {
    // Input with no recognizable operator at all.
    let err = VersionSpecifiers::from_str("blergh").unwrap_err();
    assert_eq!(err.inner.err, ParseErrorKind::MissingOperator.into());
}
|
||||
|
||||
/// <https://github.com/pypa/packaging/blob/e184feef1a28a5c574ec41f5c263a3a573861f5a/tests/test_specifiers.py#L44-L84>
#[test]
fn test_invalid_specifier() {
    // Error constructors shared by many of the cases below; each builds the
    // exact `VersionSpecifierParseError` the parser is expected to return.

    // A wildcard following a pre/post/dev suffix: the version parser stops
    // after the suffix and reports the trailing `.*` as unparsed input.
    let unexpected_end = |version: &str| -> VersionSpecifierParseError {
        ParseErrorKind::InvalidVersion(
            version::ErrorKind::UnexpectedEnd {
                version: version.to_string(),
                remaining: ".*".to_string(),
            }
            .into(),
        )
        .into()
    };
    // Prefix matching used with an operator that does not support it.
    let star_with = |operator: Operator| -> VersionSpecifierParseError {
        ParseErrorKind::InvalidSpecifier(BuildErrorKind::OperatorWithStar { operator }.into())
            .into()
    };
    // A local segment (`+…`) on version 1.0 combined with an operator that
    // does not allow local versions.
    let local_combo = |operator: Operator, local: Vec<LocalSegment>| -> VersionSpecifierParseError {
        ParseErrorKind::InvalidSpecifier(
            BuildErrorKind::OperatorLocalCombo {
                operator,
                version: Version::new([1, 0]).with_local(local),
            }
            .into(),
        )
        .into()
    };
    // A wildcard somewhere other than the trailing position.
    let wildcard_not_trailing = || -> VersionSpecifierParseError {
        ParseErrorKind::InvalidVersion(version::PatternErrorKind::WildcardNotTrailing.into()).into()
    };
    // A `.` in the local segment with nothing after it (e.g. `+5.*`).
    let local_empty = || -> VersionSpecifierParseError {
        ParseErrorKind::InvalidVersion(version::ErrorKind::LocalEmpty { precursor: '.' }.into())
            .into()
    };

    let specifiers = [
        // Operator-less specifier
        ("2.0", ParseErrorKind::MissingOperator.into()),
        // Invalid operator
        (
            "=>2.0",
            ParseErrorKind::InvalidOperator(OperatorParseError {
                got: "=>".to_string(),
            })
            .into(),
        ),
        // Version-less specifier
        ("==", ParseErrorKind::MissingVersion.into()),
        // Local segment on operators which don't support them
        (
            "~=1.0+5",
            local_combo(Operator::TildeEqual, vec![LocalSegment::Number(5)]),
        ),
        (
            ">=1.0+deadbeef",
            local_combo(
                Operator::GreaterThanEqual,
                vec![LocalSegment::String("deadbeef".to_string())],
            ),
        ),
        (
            "<=1.0+abc123",
            local_combo(
                Operator::LessThanEqual,
                vec![LocalSegment::String("abc123".to_string())],
            ),
        ),
        (
            ">1.0+watwat",
            local_combo(
                Operator::GreaterThan,
                vec![LocalSegment::String("watwat".to_string())],
            ),
        ),
        (
            "<1.0+1.0",
            local_combo(
                Operator::LessThan,
                vec![LocalSegment::Number(1), LocalSegment::Number(0)],
            ),
        ),
        // Prefix matching on operators which don't support them
        ("~=1.0.*", star_with(Operator::TildeEqual)),
        (">=1.0.*", star_with(Operator::GreaterThanEqual)),
        ("<=1.0.*", star_with(Operator::LessThanEqual)),
        (">1.0.*", star_with(Operator::GreaterThan)),
        ("<1.0.*", star_with(Operator::LessThan)),
        // Combination of local and prefix matching on operators which do
        // support one or the other
        ("==1.0.*+5", wildcard_not_trailing()),
        ("!=1.0.*+deadbeef", wildcard_not_trailing()),
        // Prefix matching cannot be used with a pre-release, post-release,
        // dev or local version
        ("==2.0a1.*", unexpected_end("2.0a1")),
        ("!=2.0a1.*", unexpected_end("2.0a1")),
        ("==2.0.post1.*", unexpected_end("2.0.post1")),
        ("!=2.0.post1.*", unexpected_end("2.0.post1")),
        ("==2.0.dev1.*", unexpected_end("2.0.dev1")),
        ("!=2.0.dev1.*", unexpected_end("2.0.dev1")),
        ("==1.0+5.*", local_empty()),
        ("!=1.0+deadbeef.*", local_empty()),
        // Prefix matching must appear at the end
        ("==1.0.*.5", wildcard_not_trailing()),
        // Compatible operator requires 2 digits in the release operator
        (
            "~=1",
            ParseErrorKind::InvalidSpecifier(BuildErrorKind::CompatibleRelease.into()).into(),
        ),
        // Cannot use a prefix matching after a .devN version
        ("==1.0.dev1.*", unexpected_end("1.0.dev1")),
        ("!=1.0.dev1.*", unexpected_end("1.0.dev1")),
    ];
    for (specifier, error) in specifiers {
        assert_eq!(VersionSpecifier::from_str(specifier).unwrap_err(), error);
    }
}
|
||||
|
||||
#[test]
fn test_display_start() {
    // Parsing normalizes away the whitespace after the operator, so the
    // round-tripped display has no space between operator and version.
    for (input, expected) in [("== 1.1.*", "==1.1.*"), ("!= 1.1.*", "!=1.1.*")] {
        let spec = VersionSpecifier::from_str(input).unwrap();
        assert_eq!(spec.to_string(), expected);
    }
}
|
||||
|
||||
#[test]
fn test_version_specifiers_str() {
    // Display normalizes whitespace and orders the individual specifiers.
    let single = VersionSpecifiers::from_str(">= 3.7").unwrap();
    assert_eq!(single.to_string(), ">=3.7");

    let multiple = VersionSpecifiers::from_str(">=3.7, < 4.0, != 3.9.0").unwrap();
    assert_eq!(multiple.to_string(), ">=3.7, !=3.9.0, <4.0");
}
|
||||
|
||||
/// These occur in the simple api, e.g.
/// <https://pypi.org/simple/geopandas/?format=application/vnd.pypi.simple.v1+json>
#[test]
fn test_version_specifiers_empty() {
    // An empty string parses to an empty specifier set and displays as empty.
    let empty = VersionSpecifiers::from_str("").unwrap();
    assert_eq!(empty.to_string(), "");
}
|
||||
|
||||
/// All non-ASCII version specifiers are invalid, but the user can still
/// attempt to parse a non-ASCII string as a version specifier. This
/// ensures no panics occur and that the error reported has correct info.
#[test]
fn non_ascii_version_specifier() {
    // A single multi-byte codepoint: the reported span covers all 4 bytes.
    let err = "💩".parse::<VersionSpecifiers>().unwrap_err();
    assert_eq!((err.inner.start, err.inner.end), (0, 4));

    // Plain ASCII baseline: the error starts at byte 12, the start of `>5.%`.
    let err = ">=3.7, <4.0,>5.%".parse::<VersionSpecifiers>().unwrap_err();
    assert_eq!((err.inner.start, err.inner.end), (12, 16));

    // Replace one ASCII comma-space with U+3000 IDEOGRAPHIC SPACE (3 bytes,
    // visual width 2). The offsets used to come out wrong here; the bug was
    // fixed by switching the error reporting to byte offsets.
    let err = ">=3.7,\u{3000}<4.0,>5.%"
        .parse::<VersionSpecifiers>()
        .unwrap_err();
    assert_eq!((err.inner.start, err.inner.end), (14, 18));
}
|
||||
|
||||
/// Tests the human readable error messages generated from an invalid
/// sequence of version specifiers.
#[test]
fn error_message_version_specifiers_parse_error() {
    let specs = ">=1.2.3, 5.4.3, >=3.4.5";
    // Hand-build the expected error: a MissingOperator over the `5.4.3,`
    // span (bytes 8..14) of the input line.
    let expected = VersionSpecifiersParseError {
        inner: Box::new(VersionSpecifiersParseErrorInner {
            err: VersionSpecifierParseError {
                kind: Box::new(ParseErrorKind::MissingOperator),
            },
            line: specs.to_string(),
            start: 8,
            end: 14,
        }),
    };
    assert_eq!(expected, VersionSpecifiers::from_str(specs).unwrap_err());
    assert_eq!(
        expected.to_string(),
        "\
Failed to parse version: Unexpected end of version specifier, expected operator:
>=1.2.3, 5.4.3, >=3.4.5
        ^^^^^^
"
    );
}
|
||||
|
||||
/// Tests the human readable error messages generated when building an
/// invalid version specifier.
#[test]
fn error_message_version_specifier_build_error() {
    // `~= 5` is rejected because `~=` needs at least two release segments.
    let expected = VersionSpecifierBuildError {
        kind: Box::new(BuildErrorKind::CompatibleRelease),
    };
    let pattern = VersionPattern::verbatim(Version::new([5]));
    let actual = VersionSpecifier::from_pattern(Operator::TildeEqual, pattern).unwrap_err();
    assert_eq!(expected, actual);
    assert_eq!(
        actual.to_string(),
        "The ~= operator requires at least two segments in the release version"
    );
}
|
||||
|
||||
/// Tests the human readable error messages generated from parsing invalid
|
||||
/// version specifier.
|
||||
#[test]
|
||||
fn error_message_version_specifier_parse_error() {
|
||||
let err = VersionSpecifierParseError {
|
||||
kind: Box::new(ParseErrorKind::InvalidSpecifier(
|
||||
VersionSpecifierBuildError {
|
||||
kind: Box::new(BuildErrorKind::CompatibleRelease),
|
||||
},
|
||||
)),
|
||||
};
|
||||
assert_eq!(err, VersionSpecifier::from_str("~=5").unwrap_err());
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"The ~= operator requires at least two segments in the release version"
|
||||
);
|
||||
}
|
|
@ -15,6 +15,7 @@ authors = { workspace = true }
|
|||
[lib]
|
||||
name = "uv_pep508"
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
|
|
@ -997,787 +997,5 @@ fn parse_pep508_requirement<T: Pep508Url>(
|
|||
})
|
||||
}
|
||||
|
||||
/// Half of these tests are copied from <https://github.com/pypa/packaging/pull/624>
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::env;
|
||||
use std::str::FromStr;
|
||||
|
||||
use insta::assert_snapshot;
|
||||
use url::Url;
|
||||
|
||||
use uv_normalize::{ExtraName, InvalidNameError, PackageName};
|
||||
use uv_pep440::{Operator, Version, VersionPattern, VersionSpecifier};
|
||||
|
||||
use crate::cursor::Cursor;
|
||||
use crate::marker::{parse, MarkerExpression, MarkerTree, MarkerValueVersion};
|
||||
use crate::{
|
||||
MarkerOperator, MarkerValueString, Requirement, TracingReporter, VerbatimUrl, VersionOrUrl,
|
||||
};
|
||||
|
||||
fn parse_pep508_err(input: &str) -> String {
|
||||
Requirement::<VerbatimUrl>::from_str(input)
|
||||
.unwrap_err()
|
||||
.to_string()
|
||||
}
|
||||
|
||||
#[cfg(feature = "non-pep508-extensions")]
|
||||
fn parse_unnamed_err(input: &str) -> String {
|
||||
crate::UnnamedRequirement::<VerbatimUrl>::from_str(input)
|
||||
.unwrap_err()
|
||||
.to_string()
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
#[test]
|
||||
fn test_preprocess_url_windows() {
|
||||
use std::path::PathBuf;
|
||||
|
||||
let actual = crate::parse_url::<VerbatimUrl>(
|
||||
&mut Cursor::new("file:///C:/Users/ferris/wheel-0.42.0.tar.gz"),
|
||||
None,
|
||||
)
|
||||
.unwrap()
|
||||
.to_file_path();
|
||||
let expected = PathBuf::from(r"C:\Users\ferris\wheel-0.42.0.tar.gz");
|
||||
assert_eq!(actual, Ok(expected));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_empty() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(""),
|
||||
@r"
|
||||
Empty field is not allowed for PEP508
|
||||
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_start() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("_name"),
|
||||
@"
|
||||
Expected package name starting with an alphanumeric character, found `_`
|
||||
_name
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_end() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name_"),
|
||||
@"
|
||||
Package name must end with an alphanumeric character, not '_'
|
||||
name_
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn basic_examples() {
|
||||
let input = r"requests[security,tests]==2.8.*,>=2.8.1 ; python_full_version < '2.7'";
|
||||
let requests = Requirement::<Url>::from_str(input).unwrap();
|
||||
assert_eq!(input, requests.to_string());
|
||||
let expected = Requirement {
|
||||
name: PackageName::from_str("requests").unwrap(),
|
||||
extras: vec![
|
||||
ExtraName::from_str("security").unwrap(),
|
||||
ExtraName::from_str("tests").unwrap(),
|
||||
],
|
||||
version_or_url: Some(VersionOrUrl::VersionSpecifier(
|
||||
[
|
||||
VersionSpecifier::from_pattern(
|
||||
Operator::Equal,
|
||||
VersionPattern::wildcard(Version::new([2, 8])),
|
||||
)
|
||||
.unwrap(),
|
||||
VersionSpecifier::from_pattern(
|
||||
Operator::GreaterThanEqual,
|
||||
VersionPattern::verbatim(Version::new([2, 8, 1])),
|
||||
)
|
||||
.unwrap(),
|
||||
]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
)),
|
||||
marker: MarkerTree::expression(MarkerExpression::Version {
|
||||
key: MarkerValueVersion::PythonFullVersion,
|
||||
specifier: VersionSpecifier::from_pattern(
|
||||
uv_pep440::Operator::LessThan,
|
||||
"2.7".parse().unwrap(),
|
||||
)
|
||||
.unwrap(),
|
||||
}),
|
||||
origin: None,
|
||||
};
|
||||
assert_eq!(requests, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parenthesized_single() {
|
||||
let numpy = Requirement::<Url>::from_str("numpy ( >=1.19 )").unwrap();
|
||||
assert_eq!(numpy.name.as_ref(), "numpy");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parenthesized_double() {
|
||||
let numpy = Requirement::<Url>::from_str("numpy ( >=1.19, <2.0 )").unwrap();
|
||||
assert_eq!(numpy.name.as_ref(), "numpy");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn versions_single() {
|
||||
let numpy = Requirement::<Url>::from_str("numpy >=1.19 ").unwrap();
|
||||
assert_eq!(numpy.name.as_ref(), "numpy");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn versions_double() {
|
||||
let numpy = Requirement::<Url>::from_str("numpy >=1.19, <2.0 ").unwrap();
|
||||
assert_eq!(numpy.name.as_ref(), "numpy");
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(feature = "non-pep508-extensions")]
|
||||
fn direct_url_no_extras() {
|
||||
let numpy = crate::UnnamedRequirement::<VerbatimUrl>::from_str("https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl").unwrap();
|
||||
assert_eq!(numpy.url.to_string(), "https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl");
|
||||
assert_eq!(numpy.extras, vec![]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(all(unix, feature = "non-pep508-extensions"))]
|
||||
fn direct_url_extras() {
|
||||
let numpy = crate::UnnamedRequirement::<VerbatimUrl>::from_str(
|
||||
"/path/to/numpy-1.26.4-cp312-cp312-win32.whl[dev]",
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
numpy.url.to_string(),
|
||||
"file:///path/to/numpy-1.26.4-cp312-cp312-win32.whl"
|
||||
);
|
||||
assert_eq!(numpy.extras, vec![ExtraName::from_str("dev").unwrap()]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(all(windows, feature = "non-pep508-extensions"))]
|
||||
fn direct_url_extras() {
|
||||
let numpy = crate::UnnamedRequirement::<VerbatimUrl>::from_str(
|
||||
"C:\\path\\to\\numpy-1.26.4-cp312-cp312-win32.whl[dev]",
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
numpy.url.to_string(),
|
||||
"file:///C:/path/to/numpy-1.26.4-cp312-cp312-win32.whl"
|
||||
);
|
||||
assert_eq!(numpy.extras, vec![ExtraName::from_str("dev").unwrap()]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extras_eof1() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("black["),
|
||||
@"
|
||||
Missing closing bracket (expected ']', found end of dependency specification)
|
||||
black[
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extras_eof2() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("black[d"),
|
||||
@"
|
||||
Missing closing bracket (expected ']', found end of dependency specification)
|
||||
black[d
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extras_eof3() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("black[d,"),
|
||||
@"
|
||||
Missing closing bracket (expected ']', found end of dependency specification)
|
||||
black[d,
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extras_illegal_start1() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("black[ö]"),
|
||||
@"
|
||||
Expected an alphanumeric character starting the extra name, found `ö`
|
||||
black[ö]
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extras_illegal_start2() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("black[_d]"),
|
||||
@"
|
||||
Expected an alphanumeric character starting the extra name, found `_`
|
||||
black[_d]
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extras_illegal_start3() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("black[,]"),
|
||||
@"
|
||||
Expected either alphanumerical character (starting the extra name) or `]` (ending the extras section), found `,`
|
||||
black[,]
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extras_illegal_character() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("black[jüpyter]"),
|
||||
@"
|
||||
Invalid character in extras name, expected an alphanumeric character, `-`, `_`, `.`, `,` or `]`, found `ü`
|
||||
black[jüpyter]
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extras1() {
|
||||
let numpy = Requirement::<Url>::from_str("black[d]").unwrap();
|
||||
assert_eq!(numpy.extras, vec![ExtraName::from_str("d").unwrap()]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extras2() {
|
||||
let numpy = Requirement::<Url>::from_str("black[d,jupyter]").unwrap();
|
||||
assert_eq!(
|
||||
numpy.extras,
|
||||
vec![
|
||||
ExtraName::from_str("d").unwrap(),
|
||||
ExtraName::from_str("jupyter").unwrap(),
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn empty_extras() {
|
||||
let black = Requirement::<Url>::from_str("black[]").unwrap();
|
||||
assert_eq!(black.extras, vec![]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn empty_extras_with_spaces() {
|
||||
let black = Requirement::<Url>::from_str("black[ ]").unwrap();
|
||||
assert_eq!(black.extras, vec![]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extra_with_trailing_comma() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("black[d,]"),
|
||||
@"
|
||||
Expected an alphanumeric character starting the extra name, found `]`
|
||||
black[d,]
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_parenthesized_pep440() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("numpy ( ><1.19 )"),
|
||||
@"
|
||||
no such comparison operator \"><\", must be one of ~= == != <= >= < > ===
|
||||
numpy ( ><1.19 )
|
||||
^^^^^^^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_parenthesized_parenthesis() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("numpy ( >=1.19"),
|
||||
@"
|
||||
Missing closing parenthesis (expected ')', found end of dependency specification)
|
||||
numpy ( >=1.19
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_whats_that() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("numpy % 1.16"),
|
||||
@"
|
||||
Expected one of `@`, `(`, `<`, `=`, `>`, `~`, `!`, `;`, found `%`
|
||||
numpy % 1.16
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn url() {
|
||||
let pip_url =
|
||||
Requirement::from_str("pip @ https://github.com/pypa/pip/archive/1.3.1.zip#sha1=da9234ee9982d4bbb3c72346a6de940a148ea686")
|
||||
.unwrap();
|
||||
let url = "https://github.com/pypa/pip/archive/1.3.1.zip#sha1=da9234ee9982d4bbb3c72346a6de940a148ea686";
|
||||
let expected = Requirement {
|
||||
name: PackageName::from_str("pip").unwrap(),
|
||||
extras: vec![],
|
||||
marker: MarkerTree::TRUE,
|
||||
version_or_url: Some(VersionOrUrl::Url(Url::parse(url).unwrap())),
|
||||
origin: None,
|
||||
};
|
||||
assert_eq!(pip_url, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_marker_parsing() {
|
||||
let marker = r#"python_version == "2.7" and (sys_platform == "win32" or (os_name == "linux" and implementation_name == 'cpython'))"#;
|
||||
let actual = parse::parse_markers_cursor::<VerbatimUrl>(
|
||||
&mut Cursor::new(marker),
|
||||
&mut TracingReporter,
|
||||
)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
|
||||
let mut a = MarkerTree::expression(MarkerExpression::Version {
|
||||
key: MarkerValueVersion::PythonVersion,
|
||||
specifier: VersionSpecifier::from_pattern(
|
||||
uv_pep440::Operator::Equal,
|
||||
"2.7".parse().unwrap(),
|
||||
)
|
||||
.unwrap(),
|
||||
});
|
||||
let mut b = MarkerTree::expression(MarkerExpression::String {
|
||||
key: MarkerValueString::SysPlatform,
|
||||
operator: MarkerOperator::Equal,
|
||||
value: "win32".to_string(),
|
||||
});
|
||||
let mut c = MarkerTree::expression(MarkerExpression::String {
|
||||
key: MarkerValueString::OsName,
|
||||
operator: MarkerOperator::Equal,
|
||||
value: "linux".to_string(),
|
||||
});
|
||||
let d = MarkerTree::expression(MarkerExpression::String {
|
||||
key: MarkerValueString::ImplementationName,
|
||||
operator: MarkerOperator::Equal,
|
||||
value: "cpython".to_string(),
|
||||
});
|
||||
|
||||
c.and(d);
|
||||
b.or(c);
|
||||
a.and(b);
|
||||
|
||||
assert_eq!(a, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn name_and_marker() {
|
||||
Requirement::<Url>::from_str(r#"numpy; sys_platform == "win32" or (os_name == "linux" and implementation_name == 'cpython')"#).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_marker_incomplete1() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"numpy; sys_platform"),
|
||||
@"
|
||||
Expected a valid marker operator (such as `>=` or `not in`), found ``
|
||||
numpy; sys_platform
|
||||
^
|
||||
"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_marker_incomplete2() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"numpy; sys_platform =="),
|
||||
@r"
|
||||
Expected marker value, found end of dependency specification
|
||||
numpy; sys_platform ==
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_marker_incomplete3() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r#"numpy; sys_platform == "win32" or"#),
|
||||
@r#"
|
||||
Expected marker value, found end of dependency specification
|
||||
numpy; sys_platform == "win32" or
|
||||
^"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_marker_incomplete4() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r#"numpy; sys_platform == "win32" or (os_name == "linux""#),
|
||||
@r#"
|
||||
Expected ')', found end of dependency specification
|
||||
numpy; sys_platform == "win32" or (os_name == "linux"
|
||||
^"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_marker_incomplete5() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r#"numpy; sys_platform == "win32" or (os_name == "linux" and"#),
|
||||
@r#"
|
||||
Expected marker value, found end of dependency specification
|
||||
numpy; sys_platform == "win32" or (os_name == "linux" and
|
||||
^"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_pep440() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"numpy >=1.1.*"),
|
||||
@r"
|
||||
Operator >= cannot be used with a wildcard version specifier
|
||||
numpy >=1.1.*
|
||||
^^^^^^^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_no_name() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"==0.0"),
|
||||
@r"
|
||||
Expected package name starting with an alphanumeric character, found `=`
|
||||
==0.0
|
||||
^
|
||||
"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_unnamedunnamed_url() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"git+https://github.com/pallets/flask.git"),
|
||||
@"
|
||||
URL requirement must be preceded by a package name. Add the name of the package before the URL (e.g., `package_name @ https://...`).
|
||||
git+https://github.com/pallets/flask.git
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_unnamed_file_path() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"/path/to/flask.tar.gz"),
|
||||
@r###"
|
||||
URL requirement must be preceded by a package name. Add the name of the package before the URL (e.g., `package_name @ /path/to/file`).
|
||||
/path/to/flask.tar.gz
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_no_comma_between_extras() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"name[bar baz]"),
|
||||
@"
|
||||
Expected either `,` (separating extras) or `]` (ending the extras section), found `b`
|
||||
name[bar baz]
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extra_comma_after_extras() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"name[bar, baz,]"),
|
||||
@"
|
||||
Expected an alphanumeric character starting the extra name, found `]`
|
||||
name[bar, baz,]
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extras_not_closed() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"name[bar, baz >= 1.0"),
|
||||
@"
|
||||
Expected either `,` (separating extras) or `]` (ending the extras section), found `>`
|
||||
name[bar, baz >= 1.0
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_no_space_after_url() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"name @ https://example.com/; extra == 'example'"),
|
||||
@"
|
||||
Missing space before ';', the end of the URL is ambiguous
|
||||
name @ https://example.com/; extra == 'example'
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_name_at_nothing() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"name @"),
|
||||
@"
|
||||
Expected URL
|
||||
name @
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_error_invalid_marker_key() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"name; invalid_name"),
|
||||
@"
|
||||
Expected a quoted string or a valid marker name, found `invalid_name`
|
||||
name; invalid_name
|
||||
^^^^^^^^^^^^
|
||||
"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_markers_invalid_order() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name; '3.7' <= invalid_name"),
|
||||
@"
|
||||
Expected a quoted string or a valid marker name, found `invalid_name`
|
||||
name; '3.7' <= invalid_name
|
||||
^^^^^^^^^^^^
|
||||
"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_markers_notin() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name; '3.7' notin python_version"),
|
||||
@"
|
||||
Expected a valid marker operator (such as `>=` or `not in`), found `notin`
|
||||
name; '3.7' notin python_version
|
||||
^^^^^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_missing_quote() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name; python_version == 3.10"),
|
||||
@"
|
||||
Expected a quoted string or a valid marker name, found `3.10`
|
||||
name; python_version == 3.10
|
||||
^^^^
|
||||
"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_markers_inpython_version() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name; '3.6'inpython_version"),
|
||||
@"
|
||||
Expected a valid marker operator (such as `>=` or `not in`), found `inpython_version`
|
||||
name; '3.6'inpython_version
|
||||
^^^^^^^^^^^^^^^^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_markers_not_python_version() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name; '3.7' not python_version"),
|
||||
@"
|
||||
Expected `i`, found `p`
|
||||
name; '3.7' not python_version
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_markers_invalid_operator() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name; '3.7' ~ python_version"),
|
||||
@"
|
||||
Expected a valid marker operator (such as `>=` or `not in`), found `~`
|
||||
name; '3.7' ~ python_version
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_invalid_prerelease() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name==1.0.org1"),
|
||||
@r###"
|
||||
after parsing `1.0`, found `.org1`, which is not part of a valid version
|
||||
name==1.0.org1
|
||||
^^^^^^^^^^
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_no_version_value() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name=="),
|
||||
@"
|
||||
Unexpected end of version specifier, expected version
|
||||
name==
|
||||
^^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_no_version_operator() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name 1.0"),
|
||||
@"
|
||||
Expected one of `@`, `(`, `<`, `=`, `>`, `~`, `!`, `;`, found `1`
|
||||
name 1.0
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_random_char() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name >= 1.0 #"),
|
||||
@"
|
||||
Trailing `#` is not allowed
|
||||
name >= 1.0 #
|
||||
^^^^^^^^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(feature = "non-pep508-extensions")]
|
||||
fn error_invalid_extra_unnamed_url() {
|
||||
assert_snapshot!(
|
||||
parse_unnamed_err("/foo-3.0.0-py3-none-any.whl[d,]"),
|
||||
@r###"
|
||||
Expected an alphanumeric character starting the extra name, found `]`
|
||||
/foo-3.0.0-py3-none-any.whl[d,]
|
||||
^
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
/// Check that the relative path support feature toggle works.
|
||||
#[test]
|
||||
fn non_pep508_paths() {
|
||||
let requirements = &[
|
||||
"foo @ file://./foo",
|
||||
"foo @ file://foo-3.0.0-py3-none-any.whl",
|
||||
"foo @ file:foo-3.0.0-py3-none-any.whl",
|
||||
"foo @ ./foo-3.0.0-py3-none-any.whl",
|
||||
];
|
||||
let cwd = env::current_dir().unwrap();
|
||||
|
||||
for requirement in requirements {
|
||||
assert_eq!(
|
||||
Requirement::<VerbatimUrl>::parse(requirement, &cwd).is_ok(),
|
||||
cfg!(feature = "non-pep508-extensions"),
|
||||
"{}: {:?}",
|
||||
requirement,
|
||||
Requirement::<VerbatimUrl>::parse(requirement, &cwd)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_space_after_operator() {
|
||||
let requirement = Requirement::<Url>::from_str("pytest;python_version<='4.0'").unwrap();
|
||||
assert_eq!(
|
||||
requirement.to_string(),
|
||||
"pytest ; python_full_version < '4.1'"
|
||||
);
|
||||
|
||||
let requirement = Requirement::<Url>::from_str("pytest;'4.0'>=python_version").unwrap();
|
||||
assert_eq!(
|
||||
requirement.to_string(),
|
||||
"pytest ; python_full_version < '4.1'"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn path_with_fragment() {
|
||||
let requirements = if cfg!(windows) {
|
||||
&[
|
||||
"wheel @ file:///C:/Users/ferris/wheel-0.42.0.whl#hash=somehash",
|
||||
"wheel @ C:/Users/ferris/wheel-0.42.0.whl#hash=somehash",
|
||||
]
|
||||
} else {
|
||||
&[
|
||||
"wheel @ file:///Users/ferris/wheel-0.42.0.whl#hash=somehash",
|
||||
"wheel @ /Users/ferris/wheel-0.42.0.whl#hash=somehash",
|
||||
]
|
||||
};
|
||||
|
||||
for requirement in requirements {
|
||||
// Extract the URL.
|
||||
let Some(VersionOrUrl::Url(url)) = Requirement::<VerbatimUrl>::from_str(requirement)
|
||||
.unwrap()
|
||||
.version_or_url
|
||||
else {
|
||||
unreachable!("Expected a URL")
|
||||
};
|
||||
|
||||
// Assert that the fragment and path have been separated correctly.
|
||||
assert_eq!(url.fragment(), Some("hash=somehash"));
|
||||
assert!(
|
||||
url.path().ends_with("/Users/ferris/wheel-0.42.0.whl"),
|
||||
"Expected the path to end with `/Users/ferris/wheel-0.42.0.whl`, found `{}`",
|
||||
url.path()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_extra_marker() -> Result<(), InvalidNameError> {
|
||||
let requirement = Requirement::<Url>::from_str("pytest").unwrap();
|
||||
let expected = Requirement::<Url>::from_str("pytest; extra == 'dotenv'").unwrap();
|
||||
let actual = requirement.with_extra_marker(&ExtraName::from_str("dotenv")?);
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let requirement = Requirement::<Url>::from_str("pytest; '4.0' >= python_version").unwrap();
|
||||
let expected =
|
||||
Requirement::from_str("pytest; '4.0' >= python_version and extra == 'dotenv'").unwrap();
|
||||
let actual = requirement.with_extra_marker(&ExtraName::from_str("dotenv")?);
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let requirement = Requirement::<Url>::from_str(
|
||||
"pytest; '4.0' >= python_version or sys_platform == 'win32'",
|
||||
)
|
||||
.unwrap();
|
||||
let expected = Requirement::from_str(
|
||||
"pytest; ('4.0' >= python_version or sys_platform == 'win32') and extra == 'dotenv'",
|
||||
)
|
||||
.unwrap();
|
||||
let actual = requirement.with_extra_marker(&ExtraName::from_str("dotenv")?);
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
|
@ -1236,89 +1236,4 @@ impl fmt::Debug for NodeId {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{NodeId, INTERNER};
|
||||
use crate::MarkerExpression;
|
||||
|
||||
fn expr(s: &str) -> NodeId {
|
||||
INTERNER
|
||||
.lock()
|
||||
.expression(MarkerExpression::from_str(s).unwrap().unwrap())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn basic() {
|
||||
let m = || INTERNER.lock();
|
||||
let extra_foo = expr("extra == 'foo'");
|
||||
assert!(!extra_foo.is_false());
|
||||
|
||||
let os_foo = expr("os_name == 'foo'");
|
||||
let extra_and_os_foo = m().or(extra_foo, os_foo);
|
||||
assert!(!extra_and_os_foo.is_false());
|
||||
assert!(!m().and(extra_foo, os_foo).is_false());
|
||||
|
||||
let trivially_true = m().or(extra_and_os_foo, extra_and_os_foo.not());
|
||||
assert!(!trivially_true.is_false());
|
||||
assert!(trivially_true.is_true());
|
||||
|
||||
let trivially_false = m().and(extra_foo, extra_foo.not());
|
||||
assert!(trivially_false.is_false());
|
||||
|
||||
let e = m().or(trivially_false, os_foo);
|
||||
assert!(!e.is_false());
|
||||
|
||||
let extra_not_foo = expr("extra != 'foo'");
|
||||
assert!(m().and(extra_foo, extra_not_foo).is_false());
|
||||
assert!(m().or(extra_foo, extra_not_foo).is_true());
|
||||
|
||||
let os_geq_bar = expr("os_name >= 'bar'");
|
||||
assert!(!os_geq_bar.is_false());
|
||||
|
||||
let os_le_bar = expr("os_name < 'bar'");
|
||||
assert!(m().and(os_geq_bar, os_le_bar).is_false());
|
||||
assert!(m().or(os_geq_bar, os_le_bar).is_true());
|
||||
|
||||
let os_leq_bar = expr("os_name <= 'bar'");
|
||||
assert!(!m().and(os_geq_bar, os_leq_bar).is_false());
|
||||
assert!(m().or(os_geq_bar, os_leq_bar).is_true());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn version() {
|
||||
let m = || INTERNER.lock();
|
||||
let eq_3 = expr("python_version == '3'");
|
||||
let neq_3 = expr("python_version != '3'");
|
||||
let geq_3 = expr("python_version >= '3'");
|
||||
let leq_3 = expr("python_version <= '3'");
|
||||
|
||||
let eq_2 = expr("python_version == '2'");
|
||||
let eq_1 = expr("python_version == '1'");
|
||||
assert!(m().and(eq_2, eq_1).is_false());
|
||||
|
||||
assert_eq!(eq_3.not(), neq_3);
|
||||
assert_eq!(eq_3, neq_3.not());
|
||||
|
||||
assert!(m().and(eq_3, neq_3).is_false());
|
||||
assert!(m().or(eq_3, neq_3).is_true());
|
||||
|
||||
assert_eq!(m().and(eq_3, geq_3), eq_3);
|
||||
assert_eq!(m().and(eq_3, leq_3), eq_3);
|
||||
|
||||
assert_eq!(m().and(geq_3, leq_3), eq_3);
|
||||
|
||||
assert!(!m().and(geq_3, leq_3).is_false());
|
||||
assert!(m().or(geq_3, leq_3).is_true());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simplify() {
|
||||
let m = || INTERNER.lock();
|
||||
let x86 = expr("platform_machine == 'x86_64'");
|
||||
let not_x86 = expr("platform_machine != 'x86_64'");
|
||||
let windows = expr("platform_machine == 'Windows'");
|
||||
|
||||
let a = m().and(x86, windows);
|
||||
let b = m().and(not_x86, windows);
|
||||
assert_eq!(m().or(a, b), windows);
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
84
crates/uv-pep508/src/marker/algebra/tests.rs
Normal file
84
crates/uv-pep508/src/marker/algebra/tests.rs
Normal file
|
@ -0,0 +1,84 @@
|
|||
use super::{NodeId, INTERNER};
|
||||
use crate::MarkerExpression;
|
||||
|
||||
fn expr(s: &str) -> NodeId {
|
||||
INTERNER
|
||||
.lock()
|
||||
.expression(MarkerExpression::from_str(s).unwrap().unwrap())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn basic() {
|
||||
let m = || INTERNER.lock();
|
||||
let extra_foo = expr("extra == 'foo'");
|
||||
assert!(!extra_foo.is_false());
|
||||
|
||||
let os_foo = expr("os_name == 'foo'");
|
||||
let extra_and_os_foo = m().or(extra_foo, os_foo);
|
||||
assert!(!extra_and_os_foo.is_false());
|
||||
assert!(!m().and(extra_foo, os_foo).is_false());
|
||||
|
||||
let trivially_true = m().or(extra_and_os_foo, extra_and_os_foo.not());
|
||||
assert!(!trivially_true.is_false());
|
||||
assert!(trivially_true.is_true());
|
||||
|
||||
let trivially_false = m().and(extra_foo, extra_foo.not());
|
||||
assert!(trivially_false.is_false());
|
||||
|
||||
let e = m().or(trivially_false, os_foo);
|
||||
assert!(!e.is_false());
|
||||
|
||||
let extra_not_foo = expr("extra != 'foo'");
|
||||
assert!(m().and(extra_foo, extra_not_foo).is_false());
|
||||
assert!(m().or(extra_foo, extra_not_foo).is_true());
|
||||
|
||||
let os_geq_bar = expr("os_name >= 'bar'");
|
||||
assert!(!os_geq_bar.is_false());
|
||||
|
||||
let os_le_bar = expr("os_name < 'bar'");
|
||||
assert!(m().and(os_geq_bar, os_le_bar).is_false());
|
||||
assert!(m().or(os_geq_bar, os_le_bar).is_true());
|
||||
|
||||
let os_leq_bar = expr("os_name <= 'bar'");
|
||||
assert!(!m().and(os_geq_bar, os_leq_bar).is_false());
|
||||
assert!(m().or(os_geq_bar, os_leq_bar).is_true());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn version() {
|
||||
let m = || INTERNER.lock();
|
||||
let eq_3 = expr("python_version == '3'");
|
||||
let neq_3 = expr("python_version != '3'");
|
||||
let geq_3 = expr("python_version >= '3'");
|
||||
let leq_3 = expr("python_version <= '3'");
|
||||
|
||||
let eq_2 = expr("python_version == '2'");
|
||||
let eq_1 = expr("python_version == '1'");
|
||||
assert!(m().and(eq_2, eq_1).is_false());
|
||||
|
||||
assert_eq!(eq_3.not(), neq_3);
|
||||
assert_eq!(eq_3, neq_3.not());
|
||||
|
||||
assert!(m().and(eq_3, neq_3).is_false());
|
||||
assert!(m().or(eq_3, neq_3).is_true());
|
||||
|
||||
assert_eq!(m().and(eq_3, geq_3), eq_3);
|
||||
assert_eq!(m().and(eq_3, leq_3), eq_3);
|
||||
|
||||
assert_eq!(m().and(geq_3, leq_3), eq_3);
|
||||
|
||||
assert!(!m().and(geq_3, leq_3).is_false());
|
||||
assert!(m().or(geq_3, leq_3).is_true());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simplify() {
|
||||
let m = || INTERNER.lock();
|
||||
let x86 = expr("platform_machine == 'x86_64'");
|
||||
let not_x86 = expr("platform_machine != 'x86_64'");
|
||||
let windows = expr("platform_machine == 'Windows'");
|
||||
|
||||
let a = m().and(x86, windows);
|
||||
let b = m().and(not_x86, windows);
|
||||
assert_eq!(m().or(a, b), windows);
|
||||
}
|
801
crates/uv-pep508/src/tests.rs
Normal file
801
crates/uv-pep508/src/tests.rs
Normal file
|
@ -0,0 +1,801 @@
|
|||
//! Half of these tests are copied from <https://github.com/pypa/packaging/pull/624>
|
||||
|
||||
use std::env;
|
||||
use std::str::FromStr;
|
||||
|
||||
use insta::assert_snapshot;
|
||||
use url::Url;
|
||||
|
||||
use uv_normalize::{ExtraName, InvalidNameError, PackageName};
|
||||
use uv_pep440::{Operator, Version, VersionPattern, VersionSpecifier};
|
||||
|
||||
use crate::cursor::Cursor;
|
||||
use crate::marker::{parse, MarkerExpression, MarkerTree, MarkerValueVersion};
|
||||
use crate::{
|
||||
MarkerOperator, MarkerValueString, Requirement, TracingReporter, VerbatimUrl, VersionOrUrl,
|
||||
};
|
||||
|
||||
fn parse_pep508_err(input: &str) -> String {
|
||||
Requirement::<VerbatimUrl>::from_str(input)
|
||||
.unwrap_err()
|
||||
.to_string()
|
||||
}
|
||||
|
||||
#[cfg(feature = "non-pep508-extensions")]
|
||||
fn parse_unnamed_err(input: &str) -> String {
|
||||
crate::UnnamedRequirement::<VerbatimUrl>::from_str(input)
|
||||
.unwrap_err()
|
||||
.to_string()
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
#[test]
|
||||
fn test_preprocess_url_windows() {
|
||||
use std::path::PathBuf;
|
||||
|
||||
let actual = crate::parse_url::<VerbatimUrl>(
|
||||
&mut Cursor::new("file:///C:/Users/ferris/wheel-0.42.0.tar.gz"),
|
||||
None,
|
||||
)
|
||||
.unwrap()
|
||||
.to_file_path();
|
||||
let expected = PathBuf::from(r"C:\Users\ferris\wheel-0.42.0.tar.gz");
|
||||
assert_eq!(actual, Ok(expected));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_empty() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(""),
|
||||
@r"
|
||||
Empty field is not allowed for PEP508
|
||||
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_start() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("_name"),
|
||||
@"
|
||||
Expected package name starting with an alphanumeric character, found `_`
|
||||
_name
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_end() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name_"),
|
||||
@"
|
||||
Package name must end with an alphanumeric character, not '_'
|
||||
name_
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn basic_examples() {
|
||||
let input = r"requests[security,tests]==2.8.*,>=2.8.1 ; python_full_version < '2.7'";
|
||||
let requests = Requirement::<Url>::from_str(input).unwrap();
|
||||
assert_eq!(input, requests.to_string());
|
||||
let expected = Requirement {
|
||||
name: PackageName::from_str("requests").unwrap(),
|
||||
extras: vec![
|
||||
ExtraName::from_str("security").unwrap(),
|
||||
ExtraName::from_str("tests").unwrap(),
|
||||
],
|
||||
version_or_url: Some(VersionOrUrl::VersionSpecifier(
|
||||
[
|
||||
VersionSpecifier::from_pattern(
|
||||
Operator::Equal,
|
||||
VersionPattern::wildcard(Version::new([2, 8])),
|
||||
)
|
||||
.unwrap(),
|
||||
VersionSpecifier::from_pattern(
|
||||
Operator::GreaterThanEqual,
|
||||
VersionPattern::verbatim(Version::new([2, 8, 1])),
|
||||
)
|
||||
.unwrap(),
|
||||
]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
)),
|
||||
marker: MarkerTree::expression(MarkerExpression::Version {
|
||||
key: MarkerValueVersion::PythonFullVersion,
|
||||
specifier: VersionSpecifier::from_pattern(
|
||||
uv_pep440::Operator::LessThan,
|
||||
"2.7".parse().unwrap(),
|
||||
)
|
||||
.unwrap(),
|
||||
}),
|
||||
origin: None,
|
||||
};
|
||||
assert_eq!(requests, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parenthesized_single() {
|
||||
let numpy = Requirement::<Url>::from_str("numpy ( >=1.19 )").unwrap();
|
||||
assert_eq!(numpy.name.as_ref(), "numpy");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parenthesized_double() {
|
||||
let numpy = Requirement::<Url>::from_str("numpy ( >=1.19, <2.0 )").unwrap();
|
||||
assert_eq!(numpy.name.as_ref(), "numpy");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn versions_single() {
|
||||
let numpy = Requirement::<Url>::from_str("numpy >=1.19 ").unwrap();
|
||||
assert_eq!(numpy.name.as_ref(), "numpy");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn versions_double() {
|
||||
let numpy = Requirement::<Url>::from_str("numpy >=1.19, <2.0 ").unwrap();
|
||||
assert_eq!(numpy.name.as_ref(), "numpy");
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(feature = "non-pep508-extensions")]
|
||||
fn direct_url_no_extras() {
|
||||
let numpy = crate::UnnamedRequirement::<VerbatimUrl>::from_str("https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl").unwrap();
|
||||
assert_eq!(numpy.url.to_string(), "https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl");
|
||||
assert_eq!(numpy.extras, vec![]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(all(unix, feature = "non-pep508-extensions"))]
|
||||
fn direct_url_extras() {
|
||||
let numpy = crate::UnnamedRequirement::<VerbatimUrl>::from_str(
|
||||
"/path/to/numpy-1.26.4-cp312-cp312-win32.whl[dev]",
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
numpy.url.to_string(),
|
||||
"file:///path/to/numpy-1.26.4-cp312-cp312-win32.whl"
|
||||
);
|
||||
assert_eq!(numpy.extras, vec![ExtraName::from_str("dev").unwrap()]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(all(windows, feature = "non-pep508-extensions"))]
|
||||
fn direct_url_extras() {
|
||||
let numpy = crate::UnnamedRequirement::<VerbatimUrl>::from_str(
|
||||
"C:\\path\\to\\numpy-1.26.4-cp312-cp312-win32.whl[dev]",
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
numpy.url.to_string(),
|
||||
"file:///C:/path/to/numpy-1.26.4-cp312-cp312-win32.whl"
|
||||
);
|
||||
assert_eq!(numpy.extras, vec![ExtraName::from_str("dev").unwrap()]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extras_eof1() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("black["),
|
||||
@r#"
|
||||
Missing closing bracket (expected ']', found end of dependency specification)
|
||||
black[
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extras_eof2() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("black[d"),
|
||||
@r#"
|
||||
Missing closing bracket (expected ']', found end of dependency specification)
|
||||
black[d
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extras_eof3() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("black[d,"),
|
||||
@r#"
|
||||
Missing closing bracket (expected ']', found end of dependency specification)
|
||||
black[d,
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extras_illegal_start1() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("black[ö]"),
|
||||
@r#"
|
||||
Expected an alphanumeric character starting the extra name, found `ö`
|
||||
black[ö]
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extras_illegal_start2() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("black[_d]"),
|
||||
@r#"
|
||||
Expected an alphanumeric character starting the extra name, found `_`
|
||||
black[_d]
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extras_illegal_start3() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("black[,]"),
|
||||
@r#"
|
||||
Expected either alphanumerical character (starting the extra name) or `]` (ending the extras section), found `,`
|
||||
black[,]
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extras_illegal_character() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("black[jüpyter]"),
|
||||
@r#"
|
||||
Invalid character in extras name, expected an alphanumeric character, `-`, `_`, `.`, `,` or `]`, found `ü`
|
||||
black[jüpyter]
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extras1() {
|
||||
let numpy = Requirement::<Url>::from_str("black[d]").unwrap();
|
||||
assert_eq!(numpy.extras, vec![ExtraName::from_str("d").unwrap()]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extras2() {
|
||||
let numpy = Requirement::<Url>::from_str("black[d,jupyter]").unwrap();
|
||||
assert_eq!(
|
||||
numpy.extras,
|
||||
vec![
|
||||
ExtraName::from_str("d").unwrap(),
|
||||
ExtraName::from_str("jupyter").unwrap(),
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn empty_extras() {
|
||||
let black = Requirement::<Url>::from_str("black[]").unwrap();
|
||||
assert_eq!(black.extras, vec![]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn empty_extras_with_spaces() {
|
||||
let black = Requirement::<Url>::from_str("black[ ]").unwrap();
|
||||
assert_eq!(black.extras, vec![]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extra_with_trailing_comma() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("black[d,]"),
|
||||
@"
|
||||
Expected an alphanumeric character starting the extra name, found `]`
|
||||
black[d,]
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_parenthesized_pep440() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("numpy ( ><1.19 )"),
|
||||
@"
|
||||
no such comparison operator \"><\", must be one of ~= == != <= >= < > ===
|
||||
numpy ( ><1.19 )
|
||||
^^^^^^^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_parenthesized_parenthesis() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("numpy ( >=1.19"),
|
||||
@r#"
|
||||
Missing closing parenthesis (expected ')', found end of dependency specification)
|
||||
numpy ( >=1.19
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_whats_that() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("numpy % 1.16"),
|
||||
@r#"
|
||||
Expected one of `@`, `(`, `<`, `=`, `>`, `~`, `!`, `;`, found `%`
|
||||
numpy % 1.16
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn url() {
|
||||
let pip_url =
|
||||
Requirement::from_str("pip @ https://github.com/pypa/pip/archive/1.3.1.zip#sha1=da9234ee9982d4bbb3c72346a6de940a148ea686")
|
||||
.unwrap();
|
||||
let url = "https://github.com/pypa/pip/archive/1.3.1.zip#sha1=da9234ee9982d4bbb3c72346a6de940a148ea686";
|
||||
let expected = Requirement {
|
||||
name: PackageName::from_str("pip").unwrap(),
|
||||
extras: vec![],
|
||||
marker: MarkerTree::TRUE,
|
||||
version_or_url: Some(VersionOrUrl::Url(Url::parse(url).unwrap())),
|
||||
origin: None,
|
||||
};
|
||||
assert_eq!(pip_url, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_marker_parsing() {
|
||||
let marker = r#"python_version == "2.7" and (sys_platform == "win32" or (os_name == "linux" and implementation_name == 'cpython'))"#;
|
||||
let actual =
|
||||
parse::parse_markers_cursor::<VerbatimUrl>(&mut Cursor::new(marker), &mut TracingReporter)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
|
||||
let mut a = MarkerTree::expression(MarkerExpression::Version {
|
||||
key: MarkerValueVersion::PythonVersion,
|
||||
specifier: VersionSpecifier::from_pattern(
|
||||
uv_pep440::Operator::Equal,
|
||||
"2.7".parse().unwrap(),
|
||||
)
|
||||
.unwrap(),
|
||||
});
|
||||
let mut b = MarkerTree::expression(MarkerExpression::String {
|
||||
key: MarkerValueString::SysPlatform,
|
||||
operator: MarkerOperator::Equal,
|
||||
value: "win32".to_string(),
|
||||
});
|
||||
let mut c = MarkerTree::expression(MarkerExpression::String {
|
||||
key: MarkerValueString::OsName,
|
||||
operator: MarkerOperator::Equal,
|
||||
value: "linux".to_string(),
|
||||
});
|
||||
let d = MarkerTree::expression(MarkerExpression::String {
|
||||
key: MarkerValueString::ImplementationName,
|
||||
operator: MarkerOperator::Equal,
|
||||
value: "cpython".to_string(),
|
||||
});
|
||||
|
||||
c.and(d);
|
||||
b.or(c);
|
||||
a.and(b);
|
||||
|
||||
assert_eq!(a, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn name_and_marker() {
|
||||
Requirement::<Url>::from_str(r#"numpy; sys_platform == "win32" or (os_name == "linux" and implementation_name == 'cpython')"#).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_marker_incomplete1() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"numpy; sys_platform"),
|
||||
@r#"
|
||||
Expected a valid marker operator (such as `>=` or `not in`), found ``
|
||||
numpy; sys_platform
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_marker_incomplete2() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"numpy; sys_platform =="),
|
||||
@r#"
|
||||
Expected marker value, found end of dependency specification
|
||||
numpy; sys_platform ==
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_marker_incomplete3() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r#"numpy; sys_platform == "win32" or"#),
|
||||
@r#"
|
||||
Expected marker value, found end of dependency specification
|
||||
numpy; sys_platform == "win32" or
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_marker_incomplete4() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r#"numpy; sys_platform == "win32" or (os_name == "linux""#),
|
||||
@r#"
|
||||
Expected ')', found end of dependency specification
|
||||
numpy; sys_platform == "win32" or (os_name == "linux"
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_marker_incomplete5() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r#"numpy; sys_platform == "win32" or (os_name == "linux" and"#),
|
||||
@r#"
|
||||
Expected marker value, found end of dependency specification
|
||||
numpy; sys_platform == "win32" or (os_name == "linux" and
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_pep440() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"numpy >=1.1.*"),
|
||||
@r#"
|
||||
Operator >= cannot be used with a wildcard version specifier
|
||||
numpy >=1.1.*
|
||||
^^^^^^^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_no_name() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"==0.0"),
|
||||
@r"
|
||||
Expected package name starting with an alphanumeric character, found `=`
|
||||
==0.0
|
||||
^
|
||||
"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_unnamedunnamed_url() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"git+https://github.com/pallets/flask.git"),
|
||||
@"
|
||||
URL requirement must be preceded by a package name. Add the name of the package before the URL (e.g., `package_name @ https://...`).
|
||||
git+https://github.com/pallets/flask.git
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_unnamed_file_path() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"/path/to/flask.tar.gz"),
|
||||
@r###"
|
||||
URL requirement must be preceded by a package name. Add the name of the package before the URL (e.g., `package_name @ /path/to/file`).
|
||||
/path/to/flask.tar.gz
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_no_comma_between_extras() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"name[bar baz]"),
|
||||
@r#"
|
||||
Expected either `,` (separating extras) or `]` (ending the extras section), found `b`
|
||||
name[bar baz]
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extra_comma_after_extras() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"name[bar, baz,]"),
|
||||
@r#"
|
||||
Expected an alphanumeric character starting the extra name, found `]`
|
||||
name[bar, baz,]
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_extras_not_closed() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"name[bar, baz >= 1.0"),
|
||||
@r#"
|
||||
Expected either `,` (separating extras) or `]` (ending the extras section), found `>`
|
||||
name[bar, baz >= 1.0
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_no_space_after_url() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"name @ https://example.com/; extra == 'example'"),
|
||||
@r#"
|
||||
Missing space before ';', the end of the URL is ambiguous
|
||||
name @ https://example.com/; extra == 'example'
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_name_at_nothing() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"name @"),
|
||||
@r#"
|
||||
Expected URL
|
||||
name @
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_error_invalid_marker_key() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err(r"name; invalid_name"),
|
||||
@r#"
|
||||
Expected a quoted string or a valid marker name, found `invalid_name`
|
||||
name; invalid_name
|
||||
^^^^^^^^^^^^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_markers_invalid_order() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name; '3.7' <= invalid_name"),
|
||||
@r#"
|
||||
Expected a quoted string or a valid marker name, found `invalid_name`
|
||||
name; '3.7' <= invalid_name
|
||||
^^^^^^^^^^^^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_markers_notin() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name; '3.7' notin python_version"),
|
||||
@"
|
||||
Expected a valid marker operator (such as `>=` or `not in`), found `notin`
|
||||
name; '3.7' notin python_version
|
||||
^^^^^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_missing_quote() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name; python_version == 3.10"),
|
||||
@"
|
||||
Expected a quoted string or a valid marker name, found `3.10`
|
||||
name; python_version == 3.10
|
||||
^^^^
|
||||
"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_markers_inpython_version() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name; '3.6'inpython_version"),
|
||||
@r#"
|
||||
Expected a valid marker operator (such as `>=` or `not in`), found `inpython_version`
|
||||
name; '3.6'inpython_version
|
||||
^^^^^^^^^^^^^^^^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_markers_not_python_version() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name; '3.7' not python_version"),
|
||||
@"
|
||||
Expected `i`, found `p`
|
||||
name; '3.7' not python_version
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_markers_invalid_operator() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name; '3.7' ~ python_version"),
|
||||
@"
|
||||
Expected a valid marker operator (such as `>=` or `not in`), found `~`
|
||||
name; '3.7' ~ python_version
|
||||
^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_invalid_prerelease() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name==1.0.org1"),
|
||||
@r###"
|
||||
after parsing `1.0`, found `.org1`, which is not part of a valid version
|
||||
name==1.0.org1
|
||||
^^^^^^^^^^
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_no_version_value() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name=="),
|
||||
@"
|
||||
Unexpected end of version specifier, expected version
|
||||
name==
|
||||
^^"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_no_version_operator() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name 1.0"),
|
||||
@r#"
|
||||
Expected one of `@`, `(`, `<`, `=`, `>`, `~`, `!`, `;`, found `1`
|
||||
name 1.0
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_random_char() {
|
||||
assert_snapshot!(
|
||||
parse_pep508_err("name >= 1.0 #"),
|
||||
@r##"
|
||||
Trailing `#` is not allowed
|
||||
name >= 1.0 #
|
||||
^^^^^^^^
|
||||
"##
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(feature = "non-pep508-extensions")]
|
||||
fn error_invalid_extra_unnamed_url() {
|
||||
assert_snapshot!(
|
||||
parse_unnamed_err("/foo-3.0.0-py3-none-any.whl[d,]"),
|
||||
@r#"
|
||||
Expected an alphanumeric character starting the extra name, found `]`
|
||||
/foo-3.0.0-py3-none-any.whl[d,]
|
||||
^
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
/// Check that the relative path support feature toggle works.
|
||||
#[test]
|
||||
fn non_pep508_paths() {
|
||||
let requirements = &[
|
||||
"foo @ file://./foo",
|
||||
"foo @ file://foo-3.0.0-py3-none-any.whl",
|
||||
"foo @ file:foo-3.0.0-py3-none-any.whl",
|
||||
"foo @ ./foo-3.0.0-py3-none-any.whl",
|
||||
];
|
||||
let cwd = env::current_dir().unwrap();
|
||||
|
||||
for requirement in requirements {
|
||||
assert_eq!(
|
||||
Requirement::<VerbatimUrl>::parse(requirement, &cwd).is_ok(),
|
||||
cfg!(feature = "non-pep508-extensions"),
|
||||
"{}: {:?}",
|
||||
requirement,
|
||||
Requirement::<VerbatimUrl>::parse(requirement, &cwd)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_space_after_operator() {
|
||||
let requirement = Requirement::<Url>::from_str("pytest;python_version<='4.0'").unwrap();
|
||||
assert_eq!(
|
||||
requirement.to_string(),
|
||||
"pytest ; python_full_version < '4.1'"
|
||||
);
|
||||
|
||||
let requirement = Requirement::<Url>::from_str("pytest;'4.0'>=python_version").unwrap();
|
||||
assert_eq!(
|
||||
requirement.to_string(),
|
||||
"pytest ; python_full_version < '4.1'"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn path_with_fragment() {
|
||||
let requirements = if cfg!(windows) {
|
||||
&[
|
||||
"wheel @ file:///C:/Users/ferris/wheel-0.42.0.whl#hash=somehash",
|
||||
"wheel @ C:/Users/ferris/wheel-0.42.0.whl#hash=somehash",
|
||||
]
|
||||
} else {
|
||||
&[
|
||||
"wheel @ file:///Users/ferris/wheel-0.42.0.whl#hash=somehash",
|
||||
"wheel @ /Users/ferris/wheel-0.42.0.whl#hash=somehash",
|
||||
]
|
||||
};
|
||||
|
||||
for requirement in requirements {
|
||||
// Extract the URL.
|
||||
let Some(VersionOrUrl::Url(url)) = Requirement::<VerbatimUrl>::from_str(requirement)
|
||||
.unwrap()
|
||||
.version_or_url
|
||||
else {
|
||||
unreachable!("Expected a URL")
|
||||
};
|
||||
|
||||
// Assert that the fragment and path have been separated correctly.
|
||||
assert_eq!(url.fragment(), Some("hash=somehash"));
|
||||
assert!(
|
||||
url.path().ends_with("/Users/ferris/wheel-0.42.0.whl"),
|
||||
"Expected the path to end with `/Users/ferris/wheel-0.42.0.whl`, found `{}`",
|
||||
url.path()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_extra_marker() -> Result<(), InvalidNameError> {
|
||||
let requirement = Requirement::<Url>::from_str("pytest").unwrap();
|
||||
let expected = Requirement::<Url>::from_str("pytest; extra == 'dotenv'").unwrap();
|
||||
let actual = requirement.with_extra_marker(&ExtraName::from_str("dotenv")?);
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let requirement = Requirement::<Url>::from_str("pytest; '4.0' >= python_version").unwrap();
|
||||
let expected =
|
||||
Requirement::from_str("pytest; '4.0' >= python_version and extra == 'dotenv'").unwrap();
|
||||
let actual = requirement.with_extra_marker(&ExtraName::from_str("dotenv")?);
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let requirement =
|
||||
Requirement::<Url>::from_str("pytest; '4.0' >= python_version or sys_platform == 'win32'")
|
||||
.unwrap();
|
||||
let expected = Requirement::from_str(
|
||||
"pytest; ('4.0' >= python_version or sys_platform == 'win32') and extra == 'dotenv'",
|
||||
)
|
||||
.unwrap();
|
||||
let actual = requirement.with_extra_marker(&ExtraName::from_str("dotenv")?);
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -516,61 +516,4 @@ impl std::fmt::Display for Scheme {
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn scheme() {
|
||||
assert_eq!(
|
||||
split_scheme("file:///home/ferris/project/scripts"),
|
||||
Some(("file", "///home/ferris/project/scripts"))
|
||||
);
|
||||
assert_eq!(
|
||||
split_scheme("file:home/ferris/project/scripts"),
|
||||
Some(("file", "home/ferris/project/scripts"))
|
||||
);
|
||||
assert_eq!(
|
||||
split_scheme("https://example.com"),
|
||||
Some(("https", "//example.com"))
|
||||
);
|
||||
assert_eq!(split_scheme("https:"), Some(("https", "")));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fragment() {
|
||||
assert_eq!(
|
||||
split_fragment(Path::new(
|
||||
"file:///home/ferris/project/scripts#hash=somehash"
|
||||
)),
|
||||
(
|
||||
Cow::Owned(PathBuf::from("file:///home/ferris/project/scripts")),
|
||||
Some("hash=somehash")
|
||||
)
|
||||
);
|
||||
assert_eq!(
|
||||
split_fragment(Path::new("file:home/ferris/project/scripts#hash=somehash")),
|
||||
(
|
||||
Cow::Owned(PathBuf::from("file:home/ferris/project/scripts")),
|
||||
Some("hash=somehash")
|
||||
)
|
||||
);
|
||||
assert_eq!(
|
||||
split_fragment(Path::new("/home/ferris/project/scripts#hash=somehash")),
|
||||
(
|
||||
Cow::Owned(PathBuf::from("/home/ferris/project/scripts")),
|
||||
Some("hash=somehash")
|
||||
)
|
||||
);
|
||||
assert_eq!(
|
||||
split_fragment(Path::new("file:///home/ferris/project/scripts")),
|
||||
(
|
||||
Cow::Borrowed(Path::new("file:///home/ferris/project/scripts")),
|
||||
None
|
||||
)
|
||||
);
|
||||
assert_eq!(
|
||||
split_fragment(Path::new("")),
|
||||
(Cow::Borrowed(Path::new("")), None)
|
||||
);
|
||||
}
|
||||
}
|
||||
mod tests;
|
||||
|
|
56
crates/uv-pep508/src/verbatim_url/tests.rs
Normal file
56
crates/uv-pep508/src/verbatim_url/tests.rs
Normal file
|
@ -0,0 +1,56 @@
|
|||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn scheme() {
|
||||
assert_eq!(
|
||||
split_scheme("file:///home/ferris/project/scripts"),
|
||||
Some(("file", "///home/ferris/project/scripts"))
|
||||
);
|
||||
assert_eq!(
|
||||
split_scheme("file:home/ferris/project/scripts"),
|
||||
Some(("file", "home/ferris/project/scripts"))
|
||||
);
|
||||
assert_eq!(
|
||||
split_scheme("https://example.com"),
|
||||
Some(("https", "//example.com"))
|
||||
);
|
||||
assert_eq!(split_scheme("https:"), Some(("https", "")));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fragment() {
|
||||
assert_eq!(
|
||||
split_fragment(Path::new(
|
||||
"file:///home/ferris/project/scripts#hash=somehash"
|
||||
)),
|
||||
(
|
||||
Cow::Owned(PathBuf::from("file:///home/ferris/project/scripts")),
|
||||
Some("hash=somehash")
|
||||
)
|
||||
);
|
||||
assert_eq!(
|
||||
split_fragment(Path::new("file:home/ferris/project/scripts#hash=somehash")),
|
||||
(
|
||||
Cow::Owned(PathBuf::from("file:home/ferris/project/scripts")),
|
||||
Some("hash=somehash")
|
||||
)
|
||||
);
|
||||
assert_eq!(
|
||||
split_fragment(Path::new("/home/ferris/project/scripts#hash=somehash")),
|
||||
(
|
||||
Cow::Owned(PathBuf::from("/home/ferris/project/scripts")),
|
||||
Some("hash=somehash")
|
||||
)
|
||||
);
|
||||
assert_eq!(
|
||||
split_fragment(Path::new("file:///home/ferris/project/scripts")),
|
||||
(
|
||||
Cow::Borrowed(Path::new("file:///home/ferris/project/scripts")),
|
||||
None
|
||||
)
|
||||
);
|
||||
assert_eq!(
|
||||
split_fragment(Path::new("")),
|
||||
(Cow::Borrowed(Path::new("")), None)
|
||||
);
|
||||
}
|
|
@ -3,6 +3,9 @@ name = "uv-performance-flate2-backend"
|
|||
version = "0.1.0"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[target.'cfg(not(any(target_arch = "s390x", target_arch = "powerpc64")))'.dependencies]
|
||||
flate2 = { version = "1.0.28", default-features = false, features = ["zlib-ng"] }
|
||||
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue