Mirror of https://github.com/astral-sh/uv.git, synced 2025-07-07 21:35:00 +00:00
Add --no-build (#358)
By default, we build source distributions for both resolving and installing, which runs arbitrary code. `--no-build` adds an option to ban this and install from wheels only; no source distributions or git builds are allowed. We also don't fetch such distributions and instead report an error immediately. I've heard from users for whom this is a requirement; I'm implementing it now because it's helpful for testing. I'm thinking about adding a shared `PuffinSharedArgs` struct so we don't have to repeat each option everywhere.
parent 4fe583257e
commit 2ebe40b986
10 changed files with 82 additions and 6 deletions
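As a minimal sketch of the pattern the diff below introduces (simplified types and names, not the real puffin crate signatures): a no_build flag is threaded down to the build dispatcher, and any attempt to build a source distribution bails out when the flag is set.

use anyhow::{bail, Result};

// Hypothetical, condensed stand-in for the real BuildDispatch.
struct BuildDispatch {
    no_build: bool,
}

impl BuildDispatch {
    fn build_source(&self, sdist: &str) -> Result<String> {
        // Refuse to run an arbitrary build backend when --no-build is set.
        if self.no_build {
            bail!("Building source distributions is disabled");
        }
        // ... a real implementation would run the build here ...
        Ok(format!("{sdist} built into a wheel"))
    }
}

fn main() {
    let dispatch = BuildDispatch { no_build: true };
    assert!(dispatch.build_source("example-1.0.tar.gz").is_err());
}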
@@ -39,6 +39,7 @@ pub(crate) async fn pip_compile(
     prerelease_mode: PreReleaseMode,
     upgrade_mode: UpgradeMode,
     index_urls: Option<IndexUrls>,
+    no_build: bool,
     cache: &Path,
     mut printer: Printer,
 ) -> Result<ExitStatus> {
@@ -137,6 +138,7 @@ pub(crate) async fn pip_compile(
         cache.to_path_buf(),
         venv.interpreter_info().clone(),
         fs::canonicalize(venv.python_executable())?,
+        no_build,
     );

     // Resolve the dependencies.
@@ -30,6 +30,7 @@ pub(crate) async fn pip_sync(
     sources: &[RequirementsSource],
     link_mode: LinkMode,
     index_urls: Option<IndexUrls>,
+    no_build: bool,
     cache: &Path,
     mut printer: Printer,
 ) -> Result<ExitStatus> {
@@ -46,7 +47,15 @@ pub(crate) async fn pip_sync(
         return Ok(ExitStatus::Success);
     }

-    sync_requirements(&requirements, link_mode, index_urls, cache, printer).await
+    sync_requirements(
+        &requirements,
+        link_mode,
+        index_urls,
+        no_build,
+        cache,
+        printer,
+    )
+    .await
 }

 /// Install a set of locked requirements into the current Python environment.
@@ -54,6 +63,7 @@ pub(crate) async fn sync_requirements(
     requirements: &[Requirement],
     link_mode: LinkMode,
     index_urls: Option<IndexUrls>,
+    no_build: bool,
     cache: &Path,
     mut printer: Printer,
 ) -> Result<ExitStatus> {
@@ -145,7 +155,8 @@ pub(crate) async fn sync_requirements(
     let start = std::time::Instant::now();

     let downloader = puffin_installer::Downloader::new(&client, cache)
-        .with_reporter(DownloadReporter::from(printer).with_length(remote.len() as u64));
+        .with_reporter(DownloadReporter::from(printer).with_length(remote.len() as u64))
+        .with_no_build(no_build);

     let downloads = downloader
         .download(remote)
@@ -186,6 +197,7 @@ pub(crate) async fn sync_requirements(
         cache.to_path_buf(),
         venv.interpreter_info().clone(),
         fs::canonicalize(venv.python_executable())?,
+        no_build,
     );

     let builder = Builder::new(&build_dispatch)
@@ -68,6 +68,7 @@ enum Commands {
 }

 #[derive(Args)]
+#[allow(clippy::struct_excessive_bools)]
 struct PipCompileArgs {
     /// Include all packages listed in the given `requirements.in` files.
     #[clap(required(true))]
@@ -110,6 +111,11 @@ struct PipCompileArgs {
     /// Allow package upgrades, ignoring pinned versions in the existing output file.
     #[clap(long)]
     upgrade: bool,
+
+    /// Don't build source distributions. This means resolving will not run arbitrary code. The
+    /// cached wheels of already built source distributions will be reused.
+    #[clap(long)]
+    no_build: bool,
 }

 #[derive(Args)]
@@ -133,6 +139,11 @@ struct PipSyncArgs {
     /// Ignore the package index, instead relying on local archives and caches.
     #[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")]
     no_index: bool,
+
+    /// Don't build source distributions. This means resolving will not run arbitrary code. The
+    /// cached wheels of already built source distributions will be reused.
+    #[clap(long)]
+    no_build: bool,
 }

 #[derive(Args)]
@@ -237,6 +248,7 @@ async fn inner() -> Result<ExitStatus> {
             args.prerelease.unwrap_or_default(),
             args.upgrade.into(),
             index_urls,
+            args.no_build,
             &cache_dir,
             printer,
         )
@@ -254,6 +266,7 @@ async fn inner() -> Result<ExitStatus> {
             &sources,
             args.link_mode.unwrap_or_default(),
             index_urls,
+            args.no_build,
             &cache_dir,
             printer,
         )
@@ -51,6 +51,7 @@ pub(crate) async fn build(args: BuildArgs) -> Result<PathBuf> {
         cache,
         venv.interpreter_info().clone(),
         fs::canonicalize(venv.python_executable())?,
+        false,
     );
     let wheel = build_dispatch
         .build_source(&args.sdist, args.subdirectory.as_deref(), &wheel_dir)
@@ -20,6 +20,10 @@ pub(crate) struct ResolveCliArgs {
     /// Path to the cache directory.
     #[arg(global = true, long, env = "PUFFIN_CACHE_DIR")]
     cache_dir: Option<PathBuf>,
+    /// Don't build source distributions. This means resolving will not run arbitrary code. The
+    /// cached wheels of already built source distributions will be reused.
+    #[clap(long)]
+    no_build: bool,
 }

 pub(crate) async fn resolve_cli(args: ResolveCliArgs) -> anyhow::Result<()> {
@@ -37,6 +41,7 @@ pub(crate) async fn resolve_cli(args: ResolveCliArgs) -> anyhow::Result<()> {
         cache.clone(),
         venv.interpreter_info().clone(),
         fs::canonicalize(venv.python_executable())?,
+        args.no_build,
     );

     let mut resolution = build_dispatch.resolve(&args.requirements).await?;
@@ -28,6 +28,10 @@ pub(crate) struct ResolveManyArgs {
     /// Path to the cache directory.
     #[arg(global = true, long, env = "PUFFIN_CACHE_DIR")]
     cache_dir: Option<PathBuf>,
+    /// Don't build source distributions. This means resolving will not run arbitrary code. The
+    /// cached wheels of already built source distributions will be reused.
+    #[clap(long)]
+    no_build: bool,
 }

 pub(crate) async fn resolve_many(args: ResolveManyArgs) -> anyhow::Result<()> {
@@ -57,6 +61,7 @@ pub(crate) async fn resolve_many(args: ResolveManyArgs) -> anyhow::Result<()> {
         cache.clone(),
         venv.interpreter_info().clone(),
         fs::canonicalize(venv.python_executable())?,
+        args.no_build,
     );

     let build_dispatch_arc = Arc::new(build_dispatch);
@@ -6,8 +6,8 @@ use std::future::Future;
 use std::path::{Path, PathBuf};
 use std::pin::Pin;

-use anyhow::Context;
 use anyhow::Result;
+use anyhow::{bail, Context};
 use itertools::{Either, Itertools};
 use tracing::{debug, instrument};

@@ -28,6 +28,7 @@ pub struct BuildDispatch {
     interpreter_info: InterpreterInfo,
     base_python: PathBuf,
     source_build_context: SourceBuildContext,
+    no_build: bool,
 }

 impl BuildDispatch {
@@ -36,6 +37,7 @@ impl BuildDispatch {
         cache: PathBuf,
         interpreter_info: InterpreterInfo,
         base_python: PathBuf,
+        no_build: bool,
     ) -> Self {
         Self {
             client,
@@ -43,6 +45,7 @@ impl BuildDispatch {
             interpreter_info,
             base_python,
             source_build_context: SourceBuildContext::default(),
+            no_build,
         }
     }
 }
@@ -60,7 +63,11 @@ impl BuildContext for BuildDispatch {
         &self.base_python
     }

-    #[instrument(skip(self, requirements))]
+    fn no_build(&self) -> bool {
+        self.no_build
+    }
+
+    #[instrument(skip(self, requirements), fields(requirements = requirements.iter().map(ToString::to_string).join(", ")))]
     fn resolve<'a>(
         &'a self,
         requirements: &'a [Requirement],
@@ -141,6 +148,7 @@ impl BuildContext for BuildDispatch {
             remote.iter().map(ToString::to_string).join(", ")
         );
         Downloader::new(&self.client, &self.cache)
+            .with_no_build(self.no_build)
             .download(remote)
             .await
             .context("Failed to download build dependencies")?
@@ -230,6 +238,9 @@ impl BuildContext for BuildDispatch {
         wheel_dir: &'a Path,
     ) -> Pin<Box<dyn Future<Output = Result<String>> + Send + 'a>> {
         Box::pin(async move {
+            if self.no_build {
+                bail!("Building source distributions is disabled");
+            }
             let builder = SourceBuild::setup(
                 source,
                 subdirectory,
@@ -2,7 +2,7 @@ use std::cmp::Reverse;
 use std::path::{Path, PathBuf};
 use std::sync::Arc;

-use anyhow::Result;
+use anyhow::{bail, Result};
 use bytesize::ByteSize;
 use tokio::task::JoinSet;
 use tokio_util::compat::FuturesAsyncReadCompatExt;
@@ -23,6 +23,8 @@ pub struct Downloader<'a> {
     cache: &'a Path,
     locks: Arc<Locks>,
     reporter: Option<Box<dyn Reporter>>,
+    /// Block building source distributions by not downloading them
+    no_build: bool,
 }

 impl<'a> Downloader<'a> {
@@ -33,6 +35,7 @@ impl<'a> Downloader<'a> {
             cache,
             locks: Arc::new(Locks::default()),
             reporter: None,
+            no_build: false,
         }
     }

@@ -45,6 +48,12 @@ impl<'a> Downloader<'a> {
         }
     }

+    /// Optionally, block downloading source distributions
+    #[must_use]
+    pub fn with_no_build(self, no_build: bool) -> Self {
+        Self { no_build, ..self }
+    }
+
     /// Download a set of distributions.
     pub async fn download(&self, distributions: Vec<RemoteDistribution>) -> Result<Vec<Download>> {
         // Sort the distributions by size.
@@ -58,6 +67,13 @@ impl<'a> Downloader<'a> {
         let mut fetches = JoinSet::new();
         let mut downloads = Vec::with_capacity(distributions.len());
         for distribution in distributions {
+            if self.no_build && !distribution.is_wheel() {
+                bail!(
+                    "Building source distributions is disabled, not downloading {}",
+                    distribution
+                );
+            }
+
             fetches.spawn(fetch_distribution(
                 distribution.clone(),
                 self.client.clone(),
@@ -5,7 +5,7 @@
 use std::str::FromStr;
 use std::sync::Arc;

-use anyhow::Result;
+use anyhow::{bail, Result};
 use fs_err::tokio as fs;

 use tempfile::tempdir_in;
@@ -76,6 +76,10 @@ impl<'a, T: BuildContext> SourceDistributionFetcher<'a, T> {
     ) -> Result<Metadata21> {
         debug!("Building: {distribution}");

+        if self.build_context.no_build() {
+            bail!("Building source distributions is disabled");
+        }
+
         // This could extract the subdirectory.
         let source = Source::try_from(distribution)?;
         let (sdist_file, subdirectory) = match source {
@@ -57,6 +57,13 @@ pub trait BuildContext {
     /// The system (or conda) python interpreter to create venvs.
     fn base_python(&self) -> &Path;

+    /// Whether source distribution building is disabled. This [`BuildContext::build_source`] calls
+    /// will fail in this case. This method exists to avoid fetching source distributions if we know
+    /// we can't build them
+    fn no_build(&self) -> bool {
+        false
+    }
+
     /// Resolve the given requirements into a ready-to-install set of package versions.
     fn resolve<'a>(
         &'a self,
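To round off the trait change in the last hunk, a simplified sketch (not the real puffin-traits definitions) of how a defaulted no_build() hook lets callers skip fetching and building source distributions while existing implementors stay unchanged:

use anyhow::{bail, Result};

trait BuildContext {
    // Defaults to allowing builds, so implementors that don't care keep compiling.
    fn no_build(&self) -> bool {
        false
    }
}

// Hypothetical wheel-only context that opts out of builds.
struct WheelOnlyContext;

impl BuildContext for WheelOnlyContext {
    fn no_build(&self) -> bool {
        true
    }
}

// Mirrors the guard added to the source-distribution fetcher: consult the
// context before doing any download or build work.
fn build_sdist<T: BuildContext>(ctx: &T, name: &str) -> Result<String> {
    if ctx.no_build() {
        bail!("Building source distributions is disabled");
    }
    Ok(format!("built {name}"))
}

fn main() {
    assert!(build_sdist(&WheelOnlyContext, "example-1.0.tar.gz").is_err());
}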