Mirror of https://github.com/astral-sh/uv.git, synced 2025-11-01 20:31:12 +00:00
Add spans to all significant tasks (#740)
I've tried to investigate puffin's performance with respect to builds and parallelism in general, but found the previous instrumentation too granular. I've tried to add spans to every function that needs noticeable IO or CPU resources, without creating duplication. This also fixes some incorrect tracing usage on async functions (https://docs.rs/tracing/latest/tracing/struct.Span.html#in-asynchronous-code) and some spans that were never actually entered.
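For context, the tracing documentation linked above describes the async pitfall this commit fixes: a span guard returned by `enter()` and held across an `.await` stays entered while the task is suspended, so unrelated work polled on the same thread gets attributed to that span. The sketch below is illustrative only, not code from this commit; the function names and the tokio runtime are assumptions. It contrasts the broken pattern with the `.instrument()` and `in_scope` fixes used throughout the diff:

use tracing::{info_span, Instrument};

// Hypothetical stand-in for real async work.
async fn do_io() {}

// Broken: the guard from `enter()` is held across an `.await`. While this
// task is parked, the span remains entered on the worker thread and is
// attributed to whatever else happens to run there.
async fn entered_across_await() {
    let span = info_span!("do_io");
    let _guard = span.enter();
    do_io().await;
}

// Fixed: `Instrument::instrument` enters the span each time the future is
// polled and exits it whenever the future yields.
async fn instrumented() {
    do_io().instrument(info_span!("do_io")).await;
}

// For synchronous sections, `Span::in_scope` bounds the span exactly to the
// closure, which also avoids spans that are created but never entered.
fn parse_sync(input: &str) -> usize {
    info_span!("parse").in_scope(|| input.len())
}

#[tokio::main]
async fn main() {
    entered_across_await().await;
    instrumented().await;
    let _ = parse_sync("example");
}

The `#[instrument(...)]` attribute that appears throughout the diff generates equivalent wiring automatically and handles async functions correctly.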
This commit is contained in:
parent a3d8b3d9ca, commit 26f597a787
12 changed files with 171 additions and 118 deletions
@@ -6,7 +6,7 @@ use std::path::Path;
 use configparser::ini::Ini;
 use fs_err as fs;
 use fs_err::File;
-use tracing::{debug, span, Level};
+use tracing::{debug, info_span};

 use pypi_types::DirectUrl;

@@ -51,7 +51,7 @@ pub fn install_wheel(
     let metadata = dist_info_metadata(&dist_info_prefix, &wheel)?;
     let (name, _version) = parse_metadata(&dist_info_prefix, &metadata)?;

-    let _my_span = span!(Level::DEBUG, "install_wheel", name);
+    let _my_span = info_span!("install_wheel", name);

     // We're going step by step though
     // https://packaging.python.org/en/latest/specifications/binary-distribution-format/#installing-a-wheel-distribution-1-0-py32-none-any-whl
@@ -13,7 +13,7 @@ use mailparse::MailHeaderMap;
 use rustc_hash::{FxHashMap, FxHashSet};
 use sha2::{Digest, Sha256};
 use tempfile::tempdir;
-use tracing::{debug, error, span, warn, Level};
+use tracing::{debug, error, instrument, warn};
 use walkdir::WalkDir;
 use zip::result::ZipError;
 use zip::write::FileOptions;

@@ -894,6 +894,7 @@ pub fn parse_key_value_file(
 ///
 /// Wheel 1.0: <https://www.python.org/dev/peps/pep-0427/>
 #[allow(clippy::too_many_arguments)]
+#[instrument(skip_all, fields(name = %filename.name))]
 pub fn install_wheel(
     location: &InstallLocation<LockedDir>,
     reader: impl Read + Seek,

@@ -907,7 +908,6 @@ pub fn install_wheel(
     sys_executable: impl AsRef<Path>,
 ) -> Result<String, Error> {
     let name = &filename.name;
-    let _my_span = span!(Level::DEBUG, "install_wheel", name = name.as_ref());

     let base_location = location.venv_root();

@@ -24,7 +24,7 @@ use tempfile::{tempdir, tempdir_in, TempDir};
 use thiserror::Error;
 use tokio::process::Command;
 use tokio::sync::Mutex;
-use tracing::{debug, info_span, instrument};
+use tracing::{debug, info_span, instrument, Instrument};

 use pep508_rs::Requirement;
 use puffin_extract::extract_source;

@@ -390,11 +390,12 @@ impl SourceBuild {
         };
         let span = info_span!(
             "run_python_script",
-            name="prepare_metadata_for_build_wheel",
+            script="prepare_metadata_for_build_wheel",
             python_version = %self.venv.interpreter().version()
         );
-        let output = run_python_script(&self.venv, &script, &self.source_tree).await?;
-        drop(span);
+        let output = run_python_script(&self.venv, &script, &self.source_tree)
+            .instrument(span)
+            .await?;
         if !output.status.success() {
             return Err(Error::from_command_output(
                 "Build backend failed to determine metadata through `prepare_metadata_for_build_wheel`".to_string(),

@@ -432,7 +433,7 @@ impl SourceBuild {
     /// dir.
     ///
     /// <https://packaging.python.org/en/latest/specifications/source-distribution-format/>
-    #[instrument(skip(self, wheel_dir), fields(package_id = self.package_id))]
+    #[instrument(skip_all, fields(package_id = self.package_id))]
     pub async fn build(&self, wheel_dir: &Path) -> Result<String, Error> {
         // The build scripts run with the extracted root as cwd, so they need the absolute path.
         let wheel_dir = fs::canonicalize(wheel_dir)?;

@@ -452,10 +453,16 @@ impl SourceBuild {
             }
             // We checked earlier that setup.py exists.
             let python_interpreter = self.venv.python_executable();
+            let span = info_span!(
+                "run_python_script",
+                script="setup.py bdist_wheel",
+                python_version = %self.venv.interpreter().version()
+            );
             let output = Command::new(&python_interpreter)
                 .args(["setup.py", "bdist_wheel"])
                 .current_dir(&self.source_tree)
                 .output()
+                .instrument(span)
                 .await
                 .map_err(|err| Error::CommandFailed(python_interpreter, err))?;
             if !output.status.success() {

@@ -508,11 +515,12 @@ impl SourceBuild {
         };
         let span = info_span!(
             "run_python_script",
-            name=format!("build_{}", self.build_kind),
+            script=format!("build_{}", self.build_kind),
             python_version = %self.venv.interpreter().version()
         );
-        let output = run_python_script(&self.venv, &script, &self.source_tree).await?;
-        drop(span);
+        let output = run_python_script(&self.venv, &script, &self.source_tree)
+            .instrument(span)
+            .await?;
         if !output.status.success() {
             return Err(Error::from_command_output(
                 format!(

@@ -585,11 +593,12 @@ async fn create_pep517_build_environment(
     };
     let span = info_span!(
         "get_requires_for_build_wheel",
-        name="build_wheel",
+        script="build_wheel",
         python_version = %venv.interpreter().version()
     );
-    let output = run_python_script(venv, &script, source_tree).await?;
-    drop(span);
+    let output = run_python_script(venv, &script, source_tree)
+        .instrument(span)
+        .await?;
     if !output.status.success() {
         return Err(Error::from_command_output(
             format!("Build backend failed to determine extra requires with `build_{build_kind}()`"),

@@ -6,7 +6,7 @@ use reqwest::{Request, Response};
 use reqwest_middleware::ClientWithMiddleware;
 use serde::de::DeserializeOwned;
 use serde::{Deserialize, Serialize};
-use tracing::{debug, trace, warn};
+use tracing::{debug, info_span, instrument, trace, warn, Instrument};

 use puffin_cache::CacheEntry;
 use puffin_fs::write_atomic;

@@ -86,6 +86,7 @@ impl CachedClient {
     /// response is passed through `response_callback` and only the result is cached and returned.
     /// The `response_callback` is allowed to make subsequent requests, e.g. through the uncached
     /// client.
+    #[instrument(skip_all)]
     pub async fn get_cached_with_callback<
         Payload: Serialize + DeserializeOwned,
         CallBackError,

@@ -101,8 +102,18 @@ impl CachedClient {
         Callback: FnOnce(Response) -> CallbackReturn,
         CallbackReturn: Future<Output = Result<Payload, CallBackError>>,
     {
-        let cached = if let Ok(cached) = fs_err::tokio::read(cache_entry.path()).await {
-            match rmp_serde::from_slice::<DataWithCachePolicy<Payload>>(&cached) {
+        let read_span = info_span!("read_cache", file = %cache_entry.path().display());
+        let read_result = fs_err::tokio::read(cache_entry.path())
+            .instrument(read_span)
+            .await;
+        let cached = if let Ok(cached) = read_result {
+            let parse_span = info_span!(
+                "parse_cache",
+                path = %cache_entry.path().display()
+            );
+            let parse_result = parse_span
+                .in_scope(|| rmp_serde::from_slice::<DataWithCachePolicy<Payload>>(&cached));
+            match parse_result {
                 Ok(data) => Some(data),
                 Err(err) => {
                     warn!(

@@ -119,16 +130,20 @@ impl CachedClient {

         let cached_response = self.send_cached(req, cached).await?;

+        let write_cache = info_span!("write_cache", file = %cache_entry.path().display());
         match cached_response {
             CachedResponse::FreshCache(data) => Ok(data),
             CachedResponse::NotModified(data_with_cache_policy) => {
-                write_atomic(
-                    cache_entry.path(),
-                    rmp_serde::to_vec(&data_with_cache_policy).map_err(crate::Error::from)?,
-                )
-                .await
-                .map_err(crate::Error::CacheWrite)?;
-                Ok(data_with_cache_policy.data)
+                async {
+                    let data =
+                        rmp_serde::to_vec(&data_with_cache_policy).map_err(crate::Error::from)?;
+                    write_atomic(cache_entry.path(), data)
+                        .await
+                        .map_err(crate::Error::CacheWrite)?;
+                    Ok(data_with_cache_policy.data)
+                }
+                .instrument(write_cache)
+                .await
             }
             CachedResponse::ModifiedOrNew(res, cache_policy) => {
                 let data = response_callback(res)

@@ -136,15 +151,19 @@ impl CachedClient {
                     .map_err(|err| CachedClientError::Callback(err))?;
                 if let Some(cache_policy) = cache_policy {
                     let data_with_cache_policy = DataWithCachePolicy { data, cache_policy };
-                    fs_err::tokio::create_dir_all(cache_entry.dir())
-                        .await
-                        .map_err(crate::Error::CacheWrite)?;
-                    let data =
-                        rmp_serde::to_vec(&data_with_cache_policy).map_err(crate::Error::from)?;
-                    write_atomic(cache_entry.path(), data)
-                        .await
-                        .map_err(crate::Error::CacheWrite)?;
-                    Ok(data_with_cache_policy.data)
+                    async {
+                        fs_err::tokio::create_dir_all(cache_entry.dir())
+                            .await
+                            .map_err(crate::Error::CacheWrite)?;
+                        let data = rmp_serde::to_vec(&data_with_cache_policy)
+                            .map_err(crate::Error::from)?;
+                        write_atomic(cache_entry.path(), data)
+                            .await
+                            .map_err(crate::Error::CacheWrite)?;
+                        Ok(data_with_cache_policy.data)
+                    }
+                    .instrument(write_cache)
+                    .await
                 } else {
                     Ok(data)
                 }

@@ -188,7 +207,12 @@ impl CachedClient {
                 .headers_mut()
                 .insert(header.0.clone(), header.1.clone());
         }
-        let res = self.0.execute(req).await?.error_for_status()?;
+        let res = self
+            .0
+            .execute(req)
+            .instrument(info_span!("revalidation_request", url = url.as_str()))
+            .await?
+            .error_for_status()?;
         let mut converted_res = http::Response::new(());
         *converted_res.status_mut() = res.status();
         for header in res.headers() {

@@ -227,6 +251,7 @@ impl CachedClient {
         Ok(cached_response)
     }

+    #[instrument(skip_all, fields(url = req.url().as_str()))]
    async fn fresh_request<T: Serialize>(
         &self,
         req: Request,

@@ -1,6 +1,7 @@
 use std::str::FromStr;

 use tl::HTMLTag;
+use tracing::instrument;
 use url::Url;

 use pep440_rs::VersionSpecifiers;

@@ -16,6 +17,7 @@ pub(crate) struct SimpleHtml {

 impl SimpleHtml {
     /// Parse the list of [`File`]s from the simple HTML page returned by the given URL.
+    #[instrument(skip(text))]
     pub(crate) fn parse(text: &str, url: &Url) -> Result<Self, Error> {
         let dom = tl::parse(text, tl::ParserOptions::default())?;

@@ -13,7 +13,7 @@ use serde::{Deserialize, Serialize};
 use tempfile::tempfile_in;
 use tokio::io::BufWriter;
 use tokio_util::compat::FuturesAsyncReadCompatExt;
-use tracing::{debug, trace};
+use tracing::{debug, info_span, instrument, trace, Instrument};
 use url::Url;

 use distribution_filename::{DistFilename, SourceDistFilename, WheelFilename};

@@ -124,6 +124,7 @@ impl RegistryClient {
     /// "simple" here refers to [PEP 503 – Simple Repository API](https://peps.python.org/pep-0503/)
     /// and [PEP 691 – JSON-based Simple API for Python Package Indexes](https://peps.python.org/pep-0691/),
     /// which the pypi json api approximately implements.
+    #[instrument("simple_api", skip_all, fields(package = %package_name))]
     pub async fn simple(
         &self,
         package_name: &PackageName,

@@ -156,36 +157,39 @@ impl RegistryClient {
             .header("Accept-Encoding", "gzip")
             .header("Accept", MediaType::accepts())
             .build()?;
-        let parse_simple_response = |response: Response| async {
-            let content_type = response
-                .headers()
-                .get("content-type")
-                .ok_or_else(|| Error::MissingContentType(url.clone()))?;
-            let content_type = content_type
-                .to_str()
-                .map_err(|err| Error::InvalidContentTypeHeader(url.clone(), err))?;
-            let media_type = content_type.split(';').next().unwrap_or(content_type);
-            let media_type = MediaType::from_str(media_type).ok_or_else(|| {
-                Error::UnsupportedMediaType(url.clone(), media_type.to_string())
-            })?;
+        let parse_simple_response = |response: Response| {
+            async {
+                let content_type = response
+                    .headers()
+                    .get("content-type")
+                    .ok_or_else(|| Error::MissingContentType(url.clone()))?;
+                let content_type = content_type
+                    .to_str()
+                    .map_err(|err| Error::InvalidContentTypeHeader(url.clone(), err))?;
+                let media_type = content_type.split(';').next().unwrap_or(content_type);
+                let media_type = MediaType::from_str(media_type).ok_or_else(|| {
+                    Error::UnsupportedMediaType(url.clone(), media_type.to_string())
+                })?;

-            match media_type {
-                MediaType::Json => {
-                    let bytes = response.bytes().await?;
-                    let data: SimpleJson = serde_json::from_slice(bytes.as_ref())
-                        .map_err(|err| Error::from_json_err(err, url.clone()))?;
-                    let metadata = SimpleMetadata::from_files(data.files, package_name);
-                    let base = BaseUrl::from(url.clone());
-                    Ok((base, metadata))
-                }
-                MediaType::Html => {
-                    let text = response.text().await?;
-                    let SimpleHtml { base, files } = SimpleHtml::parse(&text, &url)
-                        .map_err(|err| Error::from_html_err(err, url.clone()))?;
-                    let metadata = SimpleMetadata::from_files(files, package_name);
-                    Ok((base, metadata))
-                }
-            }
+                match media_type {
+                    MediaType::Json => {
+                        let bytes = response.bytes().await?;
+                        let data: SimpleJson = serde_json::from_slice(bytes.as_ref())
+                            .map_err(|err| Error::from_json_err(err, url.clone()))?;
+                        let metadata = SimpleMetadata::from_files(data.files, package_name);
+                        let base = BaseUrl::from(url.clone());
+                        Ok((base, metadata))
+                    }
+                    MediaType::Html => {
+                        let text = response.text().await?;
+                        let SimpleHtml { base, files } = SimpleHtml::parse(&text, &url)
+                            .map_err(|err| Error::from_html_err(err, url.clone()))?;
+                        let metadata = SimpleMetadata::from_files(files, package_name);
+                        Ok((base, metadata))
+                    }
+                }
+            }
+            .instrument(info_span!("parse_simple_api", package = %package_name))
         };
         let result = self
             .client

@@ -214,6 +218,7 @@ impl RegistryClient {
     /// 1. From a [PEP 658](https://peps.python.org/pep-0658/) data-dist-info-metadata url
     /// 2. From a remote wheel by partial zip reading
     /// 3. From a (temp) download of a remote wheel (this is a fallback, the webserver should support range requests)
+    #[instrument(skip(self))]
     pub async fn wheel_metadata(&self, built_dist: &BuiltDist) -> Result<Metadata21, Error> {
         let metadata = match &built_dist {
             BuiltDist::Registry(wheel) => {

@@ -272,7 +277,10 @@ impl RegistryClient {
         );

         let response_callback = |response: Response| async {
-            Metadata21::parse(response.bytes().await?.as_ref())
+            let bytes = response.bytes().await?;
+
+            info_span!("parse_metadata21")
+                .in_scope(|| Metadata21::parse(bytes.as_ref()))
                 .map_err(|err| Error::MetadataParseError(filename, url.to_string(), err))
         };
         let req = self.client.uncached().get(url.clone()).build()?;

@@ -309,19 +317,23 @@ impl RegistryClient {
         // This response callback is special, we actually make a number of subsequent requests to
         // fetch the file from the remote zip.
         let client = self.client_raw.clone();
-        let read_metadata_from_initial_response = |response: Response| async {
-            let mut reader = AsyncHttpRangeReader::from_head_response(client, response).await?;
-            trace!("Getting metadata for {filename} by range request");
-            let text = wheel_metadata_from_remote_zip(filename, &mut reader).await?;
-            let metadata = Metadata21::parse(text.as_bytes())
-                .map_err(|err| Error::MetadataParseError(filename.clone(), url.to_string(), err))?;
-            Ok(metadata)
+        let read_metadata_range_request = |response: Response| {
+            async {
+                let mut reader = AsyncHttpRangeReader::from_head_response(client, response).await?;
+                trace!("Getting metadata for {filename} by range request");
+                let text = wheel_metadata_from_remote_zip(filename, &mut reader).await?;
+                let metadata = Metadata21::parse(text.as_bytes()).map_err(|err| {
+                    Error::MetadataParseError(filename.clone(), url.to_string(), err)
+                })?;
+                Ok(metadata)
+            }
+            .instrument(info_span!("read_metadata_range_request", wheel = %filename))
         };

         let req = self.client.uncached().head(url.clone()).build()?;
         let result = self
             .client
-            .get_cached_with_callback(req, &cache_entry, read_metadata_from_initial_response)
+            .get_cached_with_callback(req, &cache_entry, read_metadata_range_request)
             .await
             .map_err(crate::Error::from);

@@ -8,7 +8,7 @@ use futures::StreamExt;
 use indicatif::ProgressStyle;
 use itertools::Itertools;
 use tokio::time::Instant;
-use tracing::{info, info_span, span, Level, Span};
+use tracing::{info, info_span, Span};
 use tracing_indicatif::span_ext::IndicatifSpanExt;

 use distribution_types::IndexUrls;

@@ -97,8 +97,6 @@ pub(crate) async fn resolve_many(args: ResolveManyArgs) -> Result<()> {
             let build_dispatch = build_dispatch.clone();
             let client = client.clone();
             async move {
-                let span = span!(Level::TRACE, "fetching");
-                let _enter = span.enter();
                 let start = Instant::now();

                 let requirement = if args.latest_version && requirement.version_or_url.is_none() {

@@ -120,7 +118,6 @@
                 };

                 let result = build_dispatch.resolve(&[requirement.clone()]).await;
-
                 (requirement.to_string(), start.elapsed(), result)
             }
         })

@@ -9,7 +9,7 @@ use fs_err::tokio as fs;
 use thiserror::Error;
 use tokio::task::JoinError;
 use tokio_util::compat::FuturesAsyncReadCompatExt;
-use tracing::debug;
+use tracing::{debug, instrument};
 use url::Url;

 use distribution_filename::{WheelFilename, WheelFilenameError};

@@ -101,6 +101,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
     }

     /// Either fetch the wheel or fetch and build the source distribution
+    #[instrument(skip(self))]
     pub async fn get_or_build_wheel(
         &self,
         dist: Dist,

@@ -252,6 +253,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
     /// Returns the [`Metadata21`], along with a "precise" URL for the source distribution, if
     /// possible. For example, given a Git dependency with a reference to a branch or tag, return a
     /// URL with a precise reference to the current commit of that branch or tag.
+    #[instrument(skip(self))]
     pub async fn get_or_build_wheel_metadata(
         &self,
         dist: &Dist,

@@ -14,7 +14,7 @@ use tempfile::TempDir;
 use thiserror::Error;
 use tokio::task::JoinError;
 use tokio_util::compat::FuturesAsyncReadCompatExt;
-use tracing::{debug, info_span, warn};
+use tracing::{debug, info_span, instrument, warn, Instrument};
 use url::Url;
 use zip::result::ZipError;
 use zip::ZipArchive;

@@ -275,50 +275,53 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
     ) -> Result<BuiltWheelMetadata, SourceDistError> {
         let cache_entry = cache_shard.entry(METADATA);

-        let response_callback = |response| async {
-            // At this point, we're seeing a new or updated source distribution; delete all
-            // wheels, and rebuild.
-            match fs::remove_dir_all(&cache_entry.dir()).await {
-                Ok(()) => debug!("Cleared built wheels and metadata for {source_dist}"),
-                Err(err) if err.kind() == std::io::ErrorKind::NotFound => (),
-                Err(err) => return Err(err.into()),
-            }
-
-            debug!("Downloading and building source distribution: {source_dist}");
-            let task = self
-                .reporter
-                .as_ref()
-                .map(|reporter| reporter.on_build_start(source_dist));
-
-            // Download the source distribution.
-            let source_dist_entry = cache_shard.entry(filename);
-            let cache_dir = self
-                .persist_source_dist_url(response, source_dist, filename, &source_dist_entry)
-                .await?;
-
-            // Build the source distribution.
-            let (disk_filename, wheel_filename, metadata) = self
-                .build_source_dist(source_dist, cache_dir, subdirectory, &cache_entry)
-                .await?;
-
-            if let Some(task) = task {
-                if let Some(reporter) = self.reporter.as_ref() {
-                    reporter.on_build_complete(source_dist, task);
-                }
-            }
-
-            Ok(Manifest::from_iter([(
-                wheel_filename,
-                DiskFilenameAndMetadata {
-                    disk_filename,
-                    metadata,
-                },
-            )]))
+        let download_and_build = |response| {
+            async {
+                // At this point, we're seeing a new or updated source distribution; delete all
+                // wheels, and rebuild.
+                match fs::remove_dir_all(&cache_entry.dir()).await {
+                    Ok(()) => debug!("Cleared built wheels and metadata for {source_dist}"),
+                    Err(err) if err.kind() == std::io::ErrorKind::NotFound => (),
+                    Err(err) => return Err(err.into()),
+                }
+
+                debug!("Downloading and building source distribution: {source_dist}");
+                let task = self
+                    .reporter
+                    .as_ref()
+                    .map(|reporter| reporter.on_build_start(source_dist));
+
+                // Download the source distribution.
+                let source_dist_entry = cache_shard.entry(filename);
+                let cache_dir = self
+                    .persist_source_dist_url(response, source_dist, filename, &source_dist_entry)
+                    .await?;
+
+                // Build the source distribution.
+                let (disk_filename, wheel_filename, metadata) = self
+                    .build_source_dist(source_dist, cache_dir, subdirectory, &cache_entry)
+                    .await?;
+
+                if let Some(task) = task {
+                    if let Some(reporter) = self.reporter.as_ref() {
+                        reporter.on_build_complete(source_dist, task);
+                    }
+                }
+
+                Ok(Manifest::from_iter([(
+                    wheel_filename,
+                    DiskFilenameAndMetadata {
+                        disk_filename,
+                        metadata,
+                    },
+                )]))
+            }
+            .instrument(info_span!("download_and_build", source_dist = %source_dist))
         };
         let req = self.cached_client.uncached().get(url.clone()).build()?;
         let manifest = self
             .cached_client
-            .get_cached_with_callback(req, &cache_entry, response_callback)
+            .get_cached_with_callback(req, &cache_entry, download_and_build)
             .await
             .map_err(|err| match err {
                 CachedClientError::Callback(err) => err,

@@ -679,6 +682,7 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
     /// Build a source distribution, storing the built wheel in the cache.
     ///
     /// Returns the un-normalized disk filename, the parsed, normalized filename and the metadata
+    #[instrument(skip_all, fields(dist = %dist))]
     async fn build_source_dist(
         &self,
         dist: &SourceDist,

@@ -61,7 +61,6 @@ impl<'a, Context: BuildContext + Send + Sync> Downloader<'a, Context> {
     }

     /// Fetch, build, and unzip the distributions in parallel.
-    #[instrument(name = "download_distributions", skip_all, fields(total = distributions.len()))]
     pub fn download_stream<'stream>(
         &'stream self,
         distributions: Vec<Dist>,

@@ -81,6 +80,7 @@ impl<'a, Context: BuildContext + Send + Sync> Downloader<'a, Context> {
     }

     /// Download, build, and unzip a set of downloaded wheels.
+    #[instrument(skip_all, fields(total = distributions.len()))]
     pub async fn download(
         &self,
         mut distributions: Vec<Dist>,

@@ -13,7 +13,7 @@ use pubgrub::solver::{Incompatibility, State};
 use pubgrub::type_aliases::DependencyConstraints;
 use rustc_hash::{FxHashMap, FxHashSet};
 use tokio::select;
-use tracing::{debug, trace};
+use tracing::{debug, instrument, trace};
 use url::Url;

 use distribution_filename::WheelFilename;

@@ -302,6 +302,7 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
     }

     /// Run the `PubGrub` solver.
+    #[instrument(skip_all)]
     async fn solve(
         &self,
         request_sink: &futures::channel::mpsc::UnboundedSender<Request>,

@@ -2,7 +2,7 @@ use std::collections::btree_map::Entry;
 use std::collections::BTreeMap;

 use chrono::{DateTime, Utc};
-use tracing::warn;
+use tracing::{instrument, warn};

 use distribution_filename::DistFilename;
 use pep508_rs::MarkerEnvironment;

@@ -23,6 +23,7 @@ pub struct VersionMap(BTreeMap<PubGrubVersion, PrioritizedDistribution>);

 impl VersionMap {
     /// Initialize a [`VersionMap`] from the given metadata.
+    #[instrument(skip_all, fields(package_name = %package_name))]
     pub(crate) fn from_metadata(
         metadata: SimpleMetadata,
         package_name: &PackageName,