Add --no-build (#358)

By default, we build source distributions during both resolving and
installing, which runs arbitrary code. `--no-build` adds an option to
forbid this and install from wheels only; no source distributions or
Git builds are allowed. With the flag set, we also don't fetch source
distributions at all and instead report an error immediately.
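
The enforcement boils down to two guards, sketched here out of context
(`no_build` and `distribution` are the names used in the diff below;
`bail!` is `anyhow::bail!`):

```rust
// In the downloader: refuse to even fetch a non-wheel distribution.
if no_build && !distribution.is_wheel() {
    bail!(
        "Building source distributions is disabled, not downloading {}",
        distribution
    );
}

// In the build paths: refuse to build a source distribution at all.
if no_build {
    bail!("Building source distributions is disabled");
}
```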

I've heard from users for whom this is a requirement; I'm implementing
it now because it's also helpful for testing.

I'm thinking about adding a shared `PuffinSharedArgs` struct so we don't
have to repeat each option everywhere.
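
For illustration only (nothing like this is in this commit), such a
struct could be flattened into each subcommand's args with clap:

```rust
use clap::Args;

/// Hypothetical shared options, defined once instead of per subcommand.
#[derive(Args)]
struct PuffinSharedArgs {
    /// Don't build source distributions.
    #[clap(long)]
    no_build: bool,
}

#[derive(Args)]
struct PipSyncArgs {
    /// All shared options are pulled in here instead of being repeated.
    #[clap(flatten)]
    shared: PuffinSharedArgs,
    // ...subcommand-specific options...
}
```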
konsti 2023-11-08 16:05:15 +01:00 committed by GitHub
parent 4fe583257e
commit 2ebe40b986
10 changed files with 82 additions and 6 deletions


@@ -39,6 +39,7 @@ pub(crate) async fn pip_compile(
prerelease_mode: PreReleaseMode,
upgrade_mode: UpgradeMode,
index_urls: Option<IndexUrls>,
no_build: bool,
cache: &Path,
mut printer: Printer,
) -> Result<ExitStatus> {
@@ -137,6 +138,7 @@ pub(crate) async fn pip_compile(
cache.to_path_buf(),
venv.interpreter_info().clone(),
fs::canonicalize(venv.python_executable())?,
no_build,
);
// Resolve the dependencies.


@@ -30,6 +30,7 @@ pub(crate) async fn pip_sync(
sources: &[RequirementsSource],
link_mode: LinkMode,
index_urls: Option<IndexUrls>,
no_build: bool,
cache: &Path,
mut printer: Printer,
) -> Result<ExitStatus> {
@@ -46,7 +47,15 @@
return Ok(ExitStatus::Success);
}
sync_requirements(&requirements, link_mode, index_urls, cache, printer).await
sync_requirements(
&requirements,
link_mode,
index_urls,
no_build,
cache,
printer,
)
.await
}
/// Install a set of locked requirements into the current Python environment.
@@ -54,6 +63,7 @@ pub(crate) async fn sync_requirements(
requirements: &[Requirement],
link_mode: LinkMode,
index_urls: Option<IndexUrls>,
no_build: bool,
cache: &Path,
mut printer: Printer,
) -> Result<ExitStatus> {
@@ -145,7 +155,8 @@ pub(crate) async fn sync_requirements(
let start = std::time::Instant::now();
let downloader = puffin_installer::Downloader::new(&client, cache)
.with_reporter(DownloadReporter::from(printer).with_length(remote.len() as u64));
.with_reporter(DownloadReporter::from(printer).with_length(remote.len() as u64))
.with_no_build(no_build);
let downloads = downloader
.download(remote)
@@ -186,6 +197,7 @@ pub(crate) async fn sync_requirements(
cache.to_path_buf(),
venv.interpreter_info().clone(),
fs::canonicalize(venv.python_executable())?,
no_build,
);
let builder = Builder::new(&build_dispatch)


@@ -68,6 +68,7 @@ enum Commands {
}
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
struct PipCompileArgs {
/// Include all packages listed in the given `requirements.in` files.
#[clap(required(true))]
@@ -110,6 +111,11 @@ struct PipCompileArgs {
/// Allow package upgrades, ignoring pinned versions in the existing output file.
#[clap(long)]
upgrade: bool,
/// Don't build source distributions. This means resolving will not run arbitrary code. The
/// cached wheels of already built source distributions will be reused.
#[clap(long)]
no_build: bool,
}
#[derive(Args)]
@@ -133,6 +139,11 @@ struct PipSyncArgs {
/// Ignore the package index, instead relying on local archives and caches.
#[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")]
no_index: bool,
/// Don't build source distributions. This means resolving will not run arbitrary code. The
/// cached wheels of already built source distributions will be reused.
#[clap(long)]
no_build: bool,
}
#[derive(Args)]
@@ -237,6 +248,7 @@ async fn inner() -> Result<ExitStatus> {
args.prerelease.unwrap_or_default(),
args.upgrade.into(),
index_urls,
args.no_build,
&cache_dir,
printer,
)
@@ -254,6 +266,7 @@ async fn inner() -> Result<ExitStatus> {
&sources,
args.link_mode.unwrap_or_default(),
index_urls,
args.no_build,
&cache_dir,
printer,
)


@@ -51,6 +51,7 @@ pub(crate) async fn build(args: BuildArgs) -> Result<PathBuf> {
cache,
venv.interpreter_info().clone(),
fs::canonicalize(venv.python_executable())?,
false,
);
let wheel = build_dispatch
.build_source(&args.sdist, args.subdirectory.as_deref(), &wheel_dir)


@@ -20,6 +20,10 @@ pub(crate) struct ResolveCliArgs {
/// Path to the cache directory.
#[arg(global = true, long, env = "PUFFIN_CACHE_DIR")]
cache_dir: Option<PathBuf>,
/// Don't build source distributions. This means resolving will not run arbitrary code. The
/// cached wheels of already built source distributions will be reused.
#[clap(long)]
no_build: bool,
}
pub(crate) async fn resolve_cli(args: ResolveCliArgs) -> anyhow::Result<()> {
@@ -37,6 +41,7 @@ pub(crate) async fn resolve_cli(args: ResolveCliArgs) -> anyhow::Result<()> {
cache.clone(),
venv.interpreter_info().clone(),
fs::canonicalize(venv.python_executable())?,
args.no_build,
);
let mut resolution = build_dispatch.resolve(&args.requirements).await?;


@@ -28,6 +28,10 @@ pub(crate) struct ResolveManyArgs {
/// Path to the cache directory.
#[arg(global = true, long, env = "PUFFIN_CACHE_DIR")]
cache_dir: Option<PathBuf>,
/// Don't build source distributions. This means resolving will not run arbitrary code. The
/// cached wheels of already built source distributions will be reused.
#[clap(long)]
no_build: bool,
}
pub(crate) async fn resolve_many(args: ResolveManyArgs) -> anyhow::Result<()> {
@@ -57,6 +61,7 @@ pub(crate) async fn resolve_many(args: ResolveManyArgs) -> anyhow::Result<()> {
cache.clone(),
venv.interpreter_info().clone(),
fs::canonicalize(venv.python_executable())?,
args.no_build,
);
let build_dispatch_arc = Arc::new(build_dispatch);


@@ -6,8 +6,8 @@ use std::future::Future;
use std::path::{Path, PathBuf};
use std::pin::Pin;
use anyhow::Context;
use anyhow::Result;
use anyhow::{bail, Context};
use itertools::{Either, Itertools};
use tracing::{debug, instrument};
@@ -28,6 +28,7 @@ pub struct BuildDispatch {
interpreter_info: InterpreterInfo,
base_python: PathBuf,
source_build_context: SourceBuildContext,
no_build: bool,
}
impl BuildDispatch {
@@ -36,6 +37,7 @@ impl BuildDispatch {
cache: PathBuf,
interpreter_info: InterpreterInfo,
base_python: PathBuf,
no_build: bool,
) -> Self {
Self {
client,
@@ -43,6 +45,7 @@ impl BuildDispatch {
interpreter_info,
base_python,
source_build_context: SourceBuildContext::default(),
no_build,
}
}
}
@@ -60,7 +63,11 @@ impl BuildContext for BuildDispatch {
&self.base_python
}
#[instrument(skip(self, requirements))]
fn no_build(&self) -> bool {
self.no_build
}
#[instrument(skip(self, requirements), fields(requirements = requirements.iter().map(ToString::to_string).join(", ")))]
fn resolve<'a>(
&'a self,
requirements: &'a [Requirement],
@@ -141,6 +148,7 @@ impl BuildContext for BuildDispatch {
remote.iter().map(ToString::to_string).join(", ")
);
Downloader::new(&self.client, &self.cache)
.with_no_build(self.no_build)
.download(remote)
.await
.context("Failed to download build dependencies")?
@@ -230,6 +238,9 @@ impl BuildContext for BuildDispatch {
wheel_dir: &'a Path,
) -> Pin<Box<dyn Future<Output = Result<String>> + Send + 'a>> {
Box::pin(async move {
if self.no_build {
bail!("Building source distributions is disabled");
}
let builder = SourceBuild::setup(
source,
subdirectory,


@@ -2,7 +2,7 @@ use std::cmp::Reverse;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use anyhow::Result;
use anyhow::{bail, Result};
use bytesize::ByteSize;
use tokio::task::JoinSet;
use tokio_util::compat::FuturesAsyncReadCompatExt;
@@ -23,6 +23,8 @@ pub struct Downloader<'a> {
cache: &'a Path,
locks: Arc<Locks>,
reporter: Option<Box<dyn Reporter>>,
/// Block building source distributions by not downloading them
no_build: bool,
}
impl<'a> Downloader<'a> {
@@ -33,6 +35,7 @@ impl<'a> Downloader<'a> {
cache,
locks: Arc::new(Locks::default()),
reporter: None,
no_build: false,
}
}
@@ -45,6 +48,12 @@ impl<'a> Downloader<'a> {
}
}
/// Optionally, block downloading source distributions
#[must_use]
pub fn with_no_build(self, no_build: bool) -> Self {
Self { no_build, ..self }
}
/// Download a set of distributions.
pub async fn download(&self, distributions: Vec<RemoteDistribution>) -> Result<Vec<Download>> {
// Sort the distributions by size.
@@ -58,6 +67,13 @@ impl<'a> Downloader<'a> {
let mut fetches = JoinSet::new();
let mut downloads = Vec::with_capacity(distributions.len());
for distribution in distributions {
if self.no_build && !distribution.is_wheel() {
bail!(
"Building source distributions is disabled, not downloading {}",
distribution
);
}
fetches.spawn(fetch_distribution(
distribution.clone(),
self.client.clone(),
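
A caller opts in through the builder method; a minimal sketch of the
resulting behavior, assuming `client`, `cache`, and `remote` are set up
as in the surrounding code:

```rust
let downloader = Downloader::new(&client, &cache).with_no_build(true);

// Any non-wheel in `remote` now fails fast instead of being fetched.
let downloads = downloader.download(remote).await?;
```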


@@ -5,7 +5,7 @@
use std::str::FromStr;
use std::sync::Arc;
use anyhow::Result;
use anyhow::{bail, Result};
use fs_err::tokio as fs;
use tempfile::tempdir_in;
@@ -76,6 +76,10 @@ impl<'a, T: BuildContext> SourceDistributionFetcher<'a, T> {
) -> Result<Metadata21> {
debug!("Building: {distribution}");
if self.build_context.no_build() {
bail!("Building source distributions is disabled");
}
// This could extract the subdirectory.
let source = Source::try_from(distribution)?;
let (sdist_file, subdirectory) = match source {


@@ -57,6 +57,13 @@ pub trait BuildContext {
/// The system (or conda) python interpreter to create venvs.
fn base_python(&self) -> &Path;
/// Whether source distribution building is disabled. [`BuildContext::build_source`] calls
/// will fail in this case. This method exists to avoid fetching source distributions if we
/// know we can't build them.
fn no_build(&self) -> bool {
false
}
/// Resolve the given requirements into a ready-to-install set of package versions.
fn resolve<'a>(
&'a self,
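
The default of `false` keeps existing `BuildContext` implementations
compiling unchanged, while `BuildDispatch` above overrides it with its
stored flag. Callers guard the expensive path with it, as in this sketch
of the pattern (`build_context` assumed, mirroring the
`SourceDistributionFetcher` change above):

```rust
// Check before fetching: a fetched sdist we can't build is wasted work.
if build_context.no_build() {
    anyhow::bail!("Building source distributions is disabled");
}
// ...otherwise fetch the sdist and build a wheel from it...
```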