Add uv export --format requirements.txt (#6778)

## Summary

The interface here is intentionally a bit more limited than `uv pip
compile`, because we don't want `requirements.txt` to be a system of
record -- it's just an export format. So, we don't write annotation
comments (i.e., which package requested each dependency), we don't
allow writing extras, etc. It's just a flat list of requirements, with
their markers and hashes.
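
For illustration, the exported output looks roughly like the following (lines taken from the test snapshots added in this PR):

```
# This file was autogenerated via `uv export`.
-e .
anyio==4.3.0 ; sys_platform == 'darwin' \
    --hash=sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6 \
    --hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8
iniconfig==2.0.0 \
    --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \
    --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374
```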

Closes #6007.

Closes #6668.

Closes #6670.
Charlie Marsh 2024-08-29 13:46:42 -04:00, committed by GitHub
parent 670e9603ee
commit cbfc928a9c
21 changed files with 1610 additions and 105 deletions


@ -12,8 +12,8 @@ use pep508_rs::Requirement;
use pypi_types::VerbatimParsedUrl;
use uv_cache::CacheArgs;
use uv_configuration::{
ConfigSettingEntry, IndexStrategy, KeyringProviderType, PackageNameSpecifier, TargetTriple,
TrustedHost,
ConfigSettingEntry, ExportFormat, IndexStrategy, KeyringProviderType, PackageNameSpecifier,
TargetTriple, TrustedHost,
};
use uv_normalize::{ExtraName, PackageName};
use uv_python::{PythonDownloads, PythonPreference, PythonVersion};
@ -654,6 +654,23 @@ pub enum ProjectCommand {
after_long_help = ""
)]
Lock(LockArgs),
/// Export the project's lockfile to an alternate format.
///
/// At present, only `requirements-txt` is supported.
///
/// The project is re-locked before exporting unless the `--locked` or `--frozen` flag is
/// provided.
///
/// uv will search for a project in the current directory or any parent directory. If a project
/// cannot be found, uv will exit with an error.
///
/// If operating in a workspace, the root will be exported by default; however, a specific
/// member can be selected using the `--package` option.
#[command(
after_help = "Use `uv help export` for more details.",
after_long_help = ""
)]
Export(ExportArgs),
/// Display the project's dependency tree.
Tree(TreeArgs),
}
@ -2290,8 +2307,7 @@ pub struct RunArgs {
/// Run the command in a specific package in the workspace.
///
/// If not in a workspace, or if the workspace member does not exist, uv
/// will exit with an error.
/// If the workspace member does not exist, uv will exit with an error.
#[arg(long)]
pub package: Option<PackageName>,
@ -2422,8 +2438,7 @@ pub struct SyncArgs {
/// The workspace's environment (`.venv`) is updated to reflect the subset
/// of dependencies declared by the specified workspace member package.
///
/// If not in a workspace, or if the workspace member does not exist, uv
/// will exit with an error.
/// If the workspace member does not exist, uv will exit with an error.
#[arg(long)]
pub package: Option<PackageName>,
@ -2753,6 +2768,84 @@ pub struct TreeArgs {
pub python: Option<String>,
}
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct ExportArgs {
/// The format to which `uv.lock` should be exported.
///
/// At present, only `requirements-txt` is supported.
#[arg(long, value_enum, default_value_t = ExportFormat::default())]
pub format: ExportFormat,
/// Export the dependencies for a specific package in the workspace.
///
/// If the workspace member does not exist, uv will exit with an error.
#[arg(long)]
pub package: Option<PackageName>,
/// Include optional dependencies from the specified extra name.
///
/// May be provided more than once.
#[arg(long, conflicts_with = "all_extras", value_parser = extra_name_with_clap_error)]
pub extra: Option<Vec<ExtraName>>,
/// Include all optional dependencies.
#[arg(long, conflicts_with = "extra")]
pub all_extras: bool,
#[arg(long, overrides_with("all_extras"), hide = true)]
pub no_all_extras: bool,
/// Include development dependencies.
#[arg(long, overrides_with("no_dev"), hide = true)]
pub dev: bool,
/// Omit development dependencies.
#[arg(long, overrides_with("dev"))]
pub no_dev: bool,
/// Assert that the `uv.lock` will remain unchanged.
///
/// Requires that the lockfile is up-to-date. If the lockfile is missing or
/// needs to be updated, uv will exit with an error.
#[arg(long, conflicts_with = "frozen")]
pub locked: bool,
/// Do not update the `uv.lock` before exporting.
///
/// If a `uv.lock` does not exist, uv will exit with an error.
#[arg(long, conflicts_with = "locked")]
pub frozen: bool,
#[command(flatten)]
pub resolver: ResolverArgs,
#[command(flatten)]
pub build: BuildArgs,
#[command(flatten)]
pub refresh: RefreshArgs,
/// The Python interpreter to use during resolution.
///
/// A Python interpreter is required for building source distributions to
/// determine package metadata when there are no wheels.
///
/// The interpreter is also used as the fallback value for the minimum
/// Python version if `requires-python` is not set.
///
/// See `uv help python` for details on Python discovery and supported
/// request formats.
#[arg(
long,
short,
env = "UV_PYTHON",
verbatim_doc_comment,
help_heading = "Python options"
)]
pub python: Option<String>,
}
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct ToolNamespace {


@ -0,0 +1,10 @@
/// The format to use when exporting a `uv.lock` file.
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
#[cfg_attr(feature = "clap", derive(clap::ValueEnum))]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub enum ExportFormat {
/// Export in `requirements.txt` format.
#[default]
RequirementsTxt,
}


@ -3,6 +3,7 @@ pub use build_options::*;
pub use concurrency::*;
pub use config_settings::*;
pub use constraints::*;
pub use export_format::*;
pub use extras::*;
pub use hash::*;
pub use install_options::*;
@ -19,6 +20,7 @@ mod build_options;
mod concurrency;
mod config_settings;
mod constraints;
mod export_format;
mod extras;
mod hash;
mod install_options;


@ -0,0 +1,81 @@
use pep508_rs::MarkerTree;
use petgraph::algo::greedy_feedback_arc_set;
use petgraph::visit::{EdgeRef, Topo};
use petgraph::{Directed, Direction, Graph};
/// A trait for a graph node that can be annotated with a [`MarkerTree`].
pub(crate) trait Markers {
fn set_markers(&mut self, markers: MarkerTree);
}
/// Propagate the [`MarkerTree`] qualifiers across the graph.
///
/// The graph is directed, so if any edge contains a marker, we need to propagate it to all
/// downstream nodes.
pub(crate) fn propagate_markers<T: Markers>(
mut graph: Graph<T, MarkerTree, Directed>,
) -> Graph<T, MarkerTree, Directed> {
// Remove any cycles. By absorption, it should be fine to ignore cycles.
//
// Imagine a graph: `A -> B -> C -> A`. Assume that `A` has weight `1`, `B` has weight `2`,
// and `C` has weight `3`. The weights are the marker trees.
//
// When propagating, we'd return to `A` when we hit the cycle, to create `1 or (1 and 2 and 3)`,
// which resolves to `1`.
//
// TODO(charlie): The above reasoning could be incorrect. Consider using a graph algorithm that
// can handle weight propagation with cycles.
let edges = {
let mut fas = greedy_feedback_arc_set(&graph)
.map(|edge| edge.id())
.collect::<Vec<_>>();
fas.sort_unstable();
let mut edges = Vec::with_capacity(fas.len());
for edge_id in fas.into_iter().rev() {
edges.push(graph.edge_endpoints(edge_id).unwrap());
graph.remove_edge(edge_id);
}
edges
};
let mut topo = Topo::new(&graph);
while let Some(index) = topo.next(&graph) {
let marker_tree = {
// Fold over the edges to combine the marker trees. If any edge is `None`, then
// the combined marker tree is `None`.
let mut edges = graph.edges_directed(index, Direction::Incoming);
edges
.next()
.and_then(|edge| graph.edge_weight(edge.id()).cloned())
.and_then(|initial| {
edges.try_fold(initial, |mut acc, edge| {
acc.or(graph.edge_weight(edge.id())?.clone());
Some(acc)
})
})
.unwrap_or_default()
};
// Propagate the marker tree to all downstream nodes.
let mut walker = graph
.neighbors_directed(index, Direction::Outgoing)
.detach();
while let Some((outgoing, _)) = walker.next(&graph) {
if let Some(weight) = graph.edge_weight_mut(outgoing) {
weight.and(marker_tree.clone());
}
}
let node = &mut graph[index];
node.set_markers(marker_tree);
}
// Re-add the removed edges. We no longer care about the edge _weights_, but we do want the
// edges to be present, to power the `# via` annotations.
for (source, target) in edges {
graph.add_edge(source, target, MarkerTree::TRUE);
}
graph
}
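
As a minimal sketch of the intended behavior (illustrative only -- `Markers` and `propagate_markers` are crate-private, and the `Weight` node type here is hypothetical): the markers on a node's incoming edges are OR-ed together, AND-ed onto each of its outgoing edges, and then stored on the node itself.

```rust
use pep508_rs::MarkerTree;
use petgraph::{Directed, Graph};

/// A hypothetical node type, used only for this sketch.
#[derive(Debug, Default)]
struct Weight {
    markers: MarkerTree,
}

impl Markers for Weight {
    fn set_markers(&mut self, markers: MarkerTree) {
        self.markers = markers;
    }
}

fn example() {
    // root -> a (win32) -> b: the `win32` marker should flow down to `b`.
    let mut graph: Graph<Weight, MarkerTree, Directed> = Graph::new();
    let root = graph.add_node(Weight::default());
    let a = graph.add_node(Weight::default());
    let b = graph.add_node(Weight::default());

    // Assumes `MarkerTree` can be parsed from a PEP 508 marker string.
    let win32: MarkerTree = "sys_platform == 'win32'".parse().unwrap();
    graph.add_edge(root, a, win32);
    graph.add_edge(a, b, MarkerTree::TRUE);

    let graph = propagate_markers(graph);
    // After propagation, both `a` and `b` carry `sys_platform == 'win32'`,
    // while `root` (which has no incoming edges) remains unconstrained.
    let _ = graph;
}
```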


@ -3,7 +3,9 @@ pub use error::{NoSolutionError, NoSolutionHeader, ResolveError};
pub use exclude_newer::ExcludeNewer;
pub use exclusions::Exclusions;
pub use flat_index::FlatIndex;
pub use lock::{Lock, LockError, ResolverManifest, SatisfiesResult, TreeDisplay};
pub use lock::{
Lock, LockError, RequirementsTxtExport, ResolverManifest, SatisfiesResult, TreeDisplay,
};
pub use manifest::Manifest;
pub use options::{Options, OptionsBuilder};
pub use preferences::{Preference, PreferenceError, Preferences};
@ -31,6 +33,7 @@ mod exclude_newer;
mod exclusions;
mod flat_index;
mod fork_urls;
mod graph_ops;
mod lock;
mod manifest;
mod marker;


@ -37,10 +37,12 @@ use uv_normalize::{ExtraName, GroupName, PackageName};
use uv_types::BuildContext;
use uv_workspace::{VirtualProject, Workspace};
pub use crate::lock::requirements_txt::RequirementsTxtExport;
pub use crate::lock::tree::TreeDisplay;
use crate::resolution::{AnnotatedDist, ResolutionGraphNode};
use crate::{ExcludeNewer, PrereleaseMode, RequiresPython, ResolutionGraph, ResolutionMode};
mod requirements_txt;
mod tree;
/// The current version of the lockfile format.


@ -0,0 +1,257 @@
use std::collections::hash_map::Entry;
use std::collections::VecDeque;
use std::fmt::Formatter;
use std::path::{Path, PathBuf};
use either::Either;
use petgraph::{Directed, Graph};
use rustc_hash::{FxHashMap, FxHashSet};
use url::Url;
use distribution_filename::{DistExtension, SourceDistExtension};
use pep508_rs::MarkerTree;
use pypi_types::{ParsedArchiveUrl, ParsedGitUrl};
use uv_configuration::ExtrasSpecification;
use uv_fs::Simplified;
use uv_git::GitReference;
use uv_normalize::{ExtraName, GroupName, PackageName};
use crate::graph_ops::{propagate_markers, Markers};
use crate::lock::{Package, PackageId, Source};
use crate::{Lock, LockError};
type LockGraph<'lock> = Graph<Node<'lock>, Edge, Directed>;
/// An export of a [`Lock`] that renders in `requirements.txt` format.
#[derive(Debug)]
pub struct RequirementsTxtExport<'lock>(LockGraph<'lock>);
impl<'lock> RequirementsTxtExport<'lock> {
pub fn from_lock(
lock: &'lock Lock,
root_name: &PackageName,
extras: &ExtrasSpecification,
dev: &[GroupName],
) -> Result<Self, LockError> {
let size_guess = lock.packages.len();
let mut petgraph = LockGraph::with_capacity(size_guess, size_guess);
let mut queue: VecDeque<(&Package, Option<&ExtraName>)> = VecDeque::new();
let mut inverse = FxHashMap::default();
// Add the workspace package to the queue.
let root = lock
.find_by_name(root_name)
.expect("found too many packages matching root")
.expect("could not find root");
// Add the base package.
queue.push_back((root, None));
// Add any extras.
match extras {
ExtrasSpecification::None => {}
ExtrasSpecification::All => {
for extra in root.optional_dependencies.keys() {
queue.push_back((root, Some(extra)));
}
}
ExtrasSpecification::Some(extras) => {
for extra in extras {
queue.push_back((root, Some(extra)));
}
}
}
// Add the root package to the graph.
inverse.insert(&root.id, petgraph.add_node(Node::from_package(root)));
// Create all the relevant nodes.
let mut seen = FxHashSet::default();
while let Some((package, extra)) = queue.pop_front() {
let index = inverse[&package.id];
let deps = if let Some(extra) = extra {
Either::Left(
package
.optional_dependencies
.get(extra)
.into_iter()
.flatten(),
)
} else {
Either::Right(package.dependencies.iter().chain(
dev.iter().flat_map(|group| {
package.dev_dependencies.get(group).into_iter().flatten()
}),
))
};
for dep in deps {
let dep_dist = lock.find_by_id(&dep.package_id);
// Add the dependency to the graph.
if let Entry::Vacant(entry) = inverse.entry(&dep.package_id) {
entry.insert(petgraph.add_node(Node::from_package(dep_dist)));
}
// Add the edge.
let dep_index = inverse[&dep.package_id];
petgraph.add_edge(index, dep_index, dep.marker.clone());
// Push its dependencies on the queue.
if seen.insert((&dep.package_id, None)) {
queue.push_back((dep_dist, None));
}
for extra in &dep.extra {
if seen.insert((&dep.package_id, Some(extra))) {
queue.push_back((dep_dist, Some(extra)));
}
}
}
}
let graph = propagate_markers(petgraph);
Ok(Self(graph))
}
}
impl std::fmt::Display for RequirementsTxtExport<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
// Collect all packages.
let mut nodes = self
.0
.raw_nodes()
.iter()
.map(|node| &node.weight)
.collect::<Vec<_>>();
// Sort the nodes, such that unnamed URLs (editables) appear at the top.
nodes.sort_unstable_by(|a, b| {
NodeComparator::from(a.package).cmp(&NodeComparator::from(b.package))
});
// Write out each node.
for node in nodes {
let Node { package, markers } = node;
match &package.id.source {
Source::Registry(_) => {
write!(f, "{}=={}", package.id.name, package.id.version)?;
}
Source::Git(url, git) => {
// Remove the fragment and query from the URL; they're already present in the
// `GitSource`.
let mut url = url.to_url();
url.set_fragment(None);
url.set_query(None);
// Reconstruct the `GitUrl` from the `GitSource`.
let git_url = uv_git::GitUrl::from_commit(
url,
GitReference::from(git.kind.clone()),
git.precise,
);
// Reconstruct the PEP 508-compatible URL from the `GitSource`.
let url = Url::from(ParsedGitUrl {
url: git_url.clone(),
subdirectory: git.subdirectory.as_ref().map(PathBuf::from),
});
write!(f, "{} @ {}", package.id.name, url)?;
}
Source::Direct(url, direct) => {
let subdirectory = direct.subdirectory.as_ref().map(PathBuf::from);
let url = Url::from(ParsedArchiveUrl {
url: url.to_url(),
subdirectory: subdirectory.clone(),
ext: DistExtension::Source(SourceDistExtension::TarGz),
});
write!(f, "{} @ {}", package.id.name, url)?;
}
Source::Path(path) | Source::Directory(path) => {
if path.as_os_str().is_empty() {
write!(f, ".")?;
} else if path.is_absolute() {
write!(f, "{}", Url::from_file_path(path).unwrap())?;
} else {
write!(f, "{}", path.portable_display())?;
}
}
Source::Editable(path) => {
if path.as_os_str().is_empty() {
write!(f, "-e .")?;
} else {
write!(f, "-e {}", path.portable_display())?;
}
}
Source::Virtual(_) => {
continue;
}
}
if let Some(contents) = markers.contents() {
write!(f, " ; {contents}")?;
}
let hashes = package.hashes();
if !hashes.is_empty() {
for hash in &hashes {
writeln!(f, " \\")?;
write!(f, " --hash=")?;
write!(f, "{hash}")?;
}
}
writeln!(f)?;
}
Ok(())
}
}
/// The nodes of the [`LockGraph`].
#[derive(Debug)]
struct Node<'lock> {
package: &'lock Package,
markers: MarkerTree,
}
impl<'lock> Node<'lock> {
/// Construct a [`Node`] from a [`Package`].
fn from_package(package: &'lock Package) -> Self {
Self {
package,
markers: MarkerTree::default(),
}
}
}
impl Markers for Node<'_> {
fn set_markers(&mut self, markers: MarkerTree) {
self.markers = markers;
}
}
/// The edges of the [`LockGraph`].
type Edge = MarkerTree;
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
enum NodeComparator<'lock> {
Editable(&'lock Path),
Path(&'lock Path),
Package(&'lock PackageId),
}
impl<'lock> From<&'lock Package> for NodeComparator<'lock> {
fn from(value: &'lock Package) -> Self {
match &value.id.source {
Source::Path(path) | Source::Directory(path) => Self::Path(path),
Source::Editable(path) => Self::Editable(path),
_ => Self::Package(&value.id),
}
}
}
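
As a rough usage sketch (mirroring the `uv export` command further below; the `lock` value, the `"project"` root name, and the error-boxing are placeholders/assumptions), the export is built from a `Lock` and rendered through its `Display` impl:

```rust
use uv_configuration::ExtrasSpecification;
use uv_normalize::PackageName;
use uv_resolver::{Lock, RequirementsTxtExport};

fn render(lock: &Lock) -> Result<String, Box<dyn std::error::Error>> {
    // Assumes `PackageName` implements `FromStr`; `"project"` is a placeholder root.
    let root: PackageName = "project".parse()?;
    let export = RequirementsTxtExport::from_lock(
        lock,
        &root,
        &ExtrasSpecification::None, // no extras
        &[],                        // no dev groups
    )?;
    Ok(format!("{export}"))
}
```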


@ -1,8 +1,7 @@
use std::collections::BTreeSet;
use owo_colors::OwoColorize;
use petgraph::algo::greedy_feedback_arc_set;
use petgraph::visit::{EdgeRef, Topo};
use petgraph::visit::EdgeRef;
use petgraph::Direction;
use rustc_hash::{FxBuildHasher, FxHashMap};
@ -10,6 +9,7 @@ use distribution_types::{DistributionMetadata, Name, SourceAnnotation, SourceAnn
use pep508_rs::MarkerTree;
use uv_normalize::PackageName;
use crate::graph_ops::{propagate_markers, Markers};
use crate::resolution::{RequirementsTxtDist, ResolutionGraphNode};
use crate::{ResolutionGraph, ResolverMarkers};
@ -49,6 +49,14 @@ enum DisplayResolutionGraphNode {
Dist(RequirementsTxtDist),
}
impl Markers for DisplayResolutionGraphNode {
fn set_markers(&mut self, markers: MarkerTree) {
if let DisplayResolutionGraphNode::Dist(node) = self {
node.markers = markers;
}
}
}
impl<'a> From<&'a ResolutionGraph> for DisplayResolutionGraph<'a> {
fn from(resolution: &'a ResolutionGraph) -> Self {
Self::new(
@ -348,77 +356,6 @@ fn to_requirements_txt_graph(graph: &ResolutionPetGraph) -> IntermediatePetGraph
next
}
/// Propagate the [`MarkerTree`] qualifiers across the graph.
///
/// The graph is directed, so if any edge contains a marker, we need to propagate it to all
/// downstream nodes.
fn propagate_markers(mut graph: IntermediatePetGraph) -> IntermediatePetGraph {
// Remove any cycles. By absorption, it should be fine to ignore cycles.
//
// Imagine a graph: `A -> B -> C -> A`. Assume that `A` has weight `1`, `B` has weight `2`,
// and `C` has weight `3`. The weights are the marker trees.
//
// When propagating, we'd return to `A` when we hit the cycle, to create `1 or (1 and 2 and 3)`,
// which resolves to `1`.
//
// TODO(charlie): The above reasoning could be incorrect. Consider using a graph algorithm that
// can handle weight propagation with cycles.
let edges = {
let mut fas = greedy_feedback_arc_set(&graph)
.map(|edge| edge.id())
.collect::<Vec<_>>();
fas.sort_unstable();
let mut edges = Vec::with_capacity(fas.len());
for edge_id in fas.into_iter().rev() {
edges.push(graph.edge_endpoints(edge_id).unwrap());
graph.remove_edge(edge_id);
}
edges
};
let mut topo = Topo::new(&graph);
while let Some(index) = topo.next(&graph) {
let marker_tree: Option<MarkerTree> = {
// Fold over the edges to combine the marker trees. If any edge is `None`, then
// the combined marker tree is `None`.
let mut edges = graph.edges_directed(index, Direction::Incoming);
edges
.next()
.and_then(|edge| graph.edge_weight(edge.id()).cloned())
.and_then(|initial| {
edges.try_fold(initial, |mut acc, edge| {
acc.or(graph.edge_weight(edge.id())?.clone());
Some(acc)
})
})
};
// Propagate the marker tree to all downstream nodes.
if let Some(marker_tree) = marker_tree.as_ref() {
let mut walker = graph
.neighbors_directed(index, Direction::Outgoing)
.detach();
while let Some((outgoing, _)) = walker.next(&graph) {
if let Some(weight) = graph.edge_weight_mut(outgoing) {
weight.and(marker_tree.clone());
}
}
}
if let DisplayResolutionGraphNode::Dist(node) = &mut graph[index] {
node.markers = marker_tree;
};
}
// Re-add the removed edges. We no longer care about the edge _weights_, but we do want the
// edges to be present, to power the `# via` annotations.
for (source, target) in edges {
graph.add_edge(source, target, MarkerTree::TRUE);
}
graph
}
/// Reduce the graph, such that all nodes for a single package are combined, regardless of
/// the extras.
///


@ -33,7 +33,7 @@ pub(crate) type MarkersForDistribution = FxHashMap<(Version, Option<VerbatimUrl>
/// A complete resolution graph in which every node represents a pinned package and every edge
/// represents a dependency between two pinned packages.
#[derive(Debug)]
#[derive(Debug, Default)]
pub struct ResolutionGraph {
/// The underlying graph.
pub(crate) petgraph: Graph<ResolutionGraphNode, MarkerTree, Directed>,


@ -19,7 +19,7 @@ pub(crate) struct RequirementsTxtDist {
pub(crate) version: Version,
pub(crate) extras: Vec<ExtraName>,
pub(crate) hashes: Vec<HashDigest>,
pub(crate) markers: Option<MarkerTree>,
pub(crate) markers: MarkerTree,
}
impl RequirementsTxtDist {
@ -89,11 +89,8 @@ impl RequirementsTxtDist {
}
};
if let Some(given) = given {
return if let Some(markers) = self
.markers
.as_ref()
.filter(|_| include_markers)
.and_then(MarkerTree::contents)
return if let Some(markers) =
self.markers.contents().filter(|_| include_markers)
{
Cow::Owned(format!("{given} ; {markers}"))
} else {
@ -104,12 +101,7 @@ impl RequirementsTxtDist {
}
if self.extras.is_empty() || !include_extras {
if let Some(markers) = self
.markers
.as_ref()
.filter(|_| include_markers)
.and_then(MarkerTree::contents)
{
if let Some(markers) = self.markers.contents().filter(|_| include_markers) {
Cow::Owned(format!("{} ; {}", self.dist.verbatim(), markers))
} else {
self.dist.verbatim()
@ -118,12 +110,7 @@ impl RequirementsTxtDist {
let mut extras = self.extras.clone();
extras.sort_unstable();
extras.dedup();
if let Some(markers) = self
.markers
.as_ref()
.filter(|_| include_markers)
.and_then(MarkerTree::contents)
{
if let Some(markers) = self.markers.contents().filter(|_| include_markers) {
Cow::Owned(format!(
"{}[{}]{} ; {}",
self.name(),
@ -176,7 +163,7 @@ impl From<&AnnotatedDist> for RequirementsTxtDist {
vec![]
},
hashes: annotated.hashes.clone(),
markers: None,
markers: MarkerTree::default(),
}
}
}


@ -19,6 +19,7 @@ pub(crate) use pip::sync::pip_sync;
pub(crate) use pip::tree::pip_tree;
pub(crate) use pip::uninstall::pip_uninstall;
pub(crate) use project::add::add;
pub(crate) use project::export::export;
pub(crate) use project::init::{init, InitProjectKind};
pub(crate) use project::lock::lock;
pub(crate) use project::remove::remove;


@ -0,0 +1,126 @@
use anyhow::{Context, Result};
use owo_colors::OwoColorize;
use uv_cache::Cache;
use uv_client::Connectivity;
use uv_configuration::{Concurrency, ExportFormat, ExtrasSpecification};
use uv_fs::CWD;
use uv_normalize::{PackageName, DEV_DEPENDENCIES};
use uv_python::{PythonDownloads, PythonPreference, PythonRequest};
use uv_resolver::RequirementsTxtExport;
use uv_workspace::{DiscoveryOptions, MemberDiscovery, VirtualProject, Workspace};
use crate::commands::pip::loggers::DefaultResolveLogger;
use crate::commands::project::lock::do_safe_lock;
use crate::commands::project::{FoundInterpreter, ProjectError};
use crate::commands::{pip, ExitStatus};
use crate::printer::Printer;
use crate::settings::ResolverSettings;
/// Export the project's `uv.lock` in an alternate format.
#[allow(clippy::fn_params_excessive_bools)]
pub(crate) async fn export(
format: ExportFormat,
package: Option<PackageName>,
extras: ExtrasSpecification,
dev: bool,
locked: bool,
frozen: bool,
python: Option<String>,
settings: ResolverSettings,
python_preference: PythonPreference,
python_downloads: PythonDownloads,
connectivity: Connectivity,
concurrency: Concurrency,
native_tls: bool,
cache: &Cache,
printer: Printer,
) -> Result<ExitStatus> {
// Identify the project.
let project = if let Some(package) = package {
VirtualProject::Project(
Workspace::discover(&CWD, &DiscoveryOptions::default())
.await?
.with_current_project(package.clone())
.with_context(|| format!("Package `{package}` not found in workspace"))?,
)
} else if frozen {
VirtualProject::discover(
&CWD,
&DiscoveryOptions {
members: MemberDiscovery::None,
..DiscoveryOptions::default()
},
)
.await?
} else {
VirtualProject::discover(&CWD, &DiscoveryOptions::default()).await?
};
let VirtualProject::Project(project) = project else {
return Err(anyhow::anyhow!("Legacy non-project roots are not supported in `uv export`; add a `[project]` table to your `pyproject.toml` to enable exports"));
};
// Find an interpreter for the project
let interpreter = FoundInterpreter::discover(
project.workspace(),
python.as_deref().map(PythonRequest::parse),
python_preference,
python_downloads,
connectivity,
native_tls,
cache,
printer,
)
.await?
.into_interpreter();
// Lock the project.
let lock = match do_safe_lock(
locked,
frozen,
project.workspace(),
&interpreter,
settings.as_ref(),
Box::new(DefaultResolveLogger),
connectivity,
concurrency,
native_tls,
cache,
printer,
)
.await
{
Ok(result) => result.into_lock(),
Err(ProjectError::Operation(pip::operations::Error::Resolve(
uv_resolver::ResolveError::NoSolution(err),
))) => {
let report = miette::Report::msg(format!("{err}")).context(err.header());
anstream::eprint!("{report:?}");
return Ok(ExitStatus::Failure);
}
Err(err) => return Err(err.into()),
};
// Include development dependencies, if requested.
let dev = if dev {
vec![DEV_DEPENDENCIES.clone()]
} else {
vec![]
};
// Generate the export.
match format {
ExportFormat::RequirementsTxt => {
let export =
RequirementsTxtExport::from_lock(&lock, project.project_name(), &extras, &dev)?;
anstream::println!(
"{}",
"# This file was autogenerated via `uv export`.".green()
);
anstream::print!("{export}");
}
}
Ok(ExitStatus::Success)
}


@ -38,6 +38,7 @@ use crate::settings::{InstallerSettingsRef, ResolverInstallerSettings, ResolverS
pub(crate) mod add;
pub(crate) mod environment;
pub(crate) mod export;
pub(crate) mod init;
pub(crate) mod lock;
pub(crate) mod remove;


@ -1,6 +1,7 @@
use anyhow::{Context, Result};
use distribution_types::{Dist, ResolvedDist, SourceDist};
use itertools::Itertools;
use distribution_types::{Dist, ResolvedDist, SourceDist};
use pep508_rs::MarkerTree;
use uv_auth::store_credentials_from_url;
use uv_cache::Cache;


@ -1262,6 +1262,33 @@ async fn run_project(
)
.await
}
ProjectCommand::Export(args) => {
// Resolve the settings from the command-line arguments and workspace configuration.
let args = settings::ExportSettings::resolve(args, filesystem);
show_settings!(args);
// Initialize the cache.
let cache = cache.init()?;
commands::export(
args.format,
args.package,
args.extras,
args.dev,
args.locked,
args.frozen,
args.python,
args.settings,
globals.python_preference,
globals.python_downloads,
globals.connectivity,
globals.concurrency,
globals.native_tls,
&cache,
printer,
)
.await
}
}
}


@ -11,7 +11,7 @@ use pypi_types::{Requirement, SupportedEnvironments};
use uv_cache::{CacheArgs, Refresh};
use uv_cli::{
options::{flag, resolver_installer_options, resolver_options},
ToolUpgradeArgs,
ExportArgs, ToolUpgradeArgs,
};
use uv_cli::{
AddArgs, ColorChoice, ExternalCommand, GlobalArgs, InitArgs, ListFormat, LockArgs, Maybe,
@ -22,7 +22,7 @@ use uv_cli::{
};
use uv_client::Connectivity;
use uv_configuration::{
BuildOptions, Concurrency, ConfigSettings, ExtrasSpecification, HashCheckingMode,
BuildOptions, Concurrency, ConfigSettings, ExportFormat, ExtrasSpecification, HashCheckingMode,
IndexStrategy, InstallOptions, KeyringProviderType, NoBinary, NoBuild, PreviewMode, Reinstall,
SourceStrategy, TargetTriple, TrustedHost, Upgrade,
};
@ -935,6 +935,59 @@ impl TreeSettings {
}
}
}
/// The resolved settings to use for an `export` invocation.
#[allow(clippy::struct_excessive_bools, dead_code)]
#[derive(Debug, Clone)]
pub(crate) struct ExportSettings {
pub(crate) format: ExportFormat,
pub(crate) package: Option<PackageName>,
pub(crate) extras: ExtrasSpecification,
pub(crate) dev: bool,
pub(crate) locked: bool,
pub(crate) frozen: bool,
pub(crate) python: Option<String>,
pub(crate) refresh: Refresh,
pub(crate) settings: ResolverSettings,
}
impl ExportSettings {
/// Resolve the [`ExportSettings`] from the CLI and filesystem configuration.
#[allow(clippy::needless_pass_by_value)]
pub(crate) fn resolve(args: ExportArgs, filesystem: Option<FilesystemOptions>) -> Self {
let ExportArgs {
format,
package,
extra,
all_extras,
no_all_extras,
dev,
no_dev,
locked,
frozen,
resolver,
build,
refresh,
python,
} = args;
Self {
package,
format,
extras: ExtrasSpecification::from_args(
flag(all_extras, no_all_extras).unwrap_or_default(),
extra.unwrap_or_default(),
),
dev: flag(dev, no_dev).unwrap_or(true),
locked,
frozen,
python,
refresh: Refresh::from(refresh),
settings: ResolverSettings::combine(resolver_options(resolver, build), filesystem),
}
}
}
/// The resolved settings to use for a `pip compile` invocation.
#[allow(clippy::struct_excessive_bools)]
#[derive(Debug, Clone)]


@ -492,6 +492,14 @@ impl TestContext {
command
}
/// Create a `uv export` command with options shared across scenarios.
pub fn export(&self) -> Command {
let mut command = Command::new(get_bin());
command.arg("export");
self.add_shared_args(&mut command);
command
}
/// Create a `uv python find` command with options shared across scenarios.
pub fn python_find(&self) -> Command {
let mut command = Command::new(get_bin());

crates/uv/tests/export.rs (new file, 634 lines)

@ -0,0 +1,634 @@
#![cfg(all(feature = "python", feature = "pypi"))]
#![allow(clippy::disallowed_types)]
use anyhow::Result;
use assert_cmd::assert::OutputAssertExt;
use assert_fs::prelude::*;
use common::{uv_snapshot, TestContext};
use std::process::Stdio;
mod common;
#[test]
fn dependency() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["anyio==3.7.0"]
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"
"#,
)?;
context.lock().assert().success();
uv_snapshot!(context.filters(), context.export(), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated via `uv export`.
-e .
anyio==3.7.0 \
--hash=sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce \
--hash=sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
sniffio==1.3.1 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2
----- stderr -----
Resolved 4 packages in [TIME]
"###);
Ok(())
}
#[test]
fn dependency_extra() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["flask[dotenv]"]
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"
"#,
)?;
context.lock().assert().success();
uv_snapshot!(context.filters(), context.export(), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated via `uv export`.
-e .
blinker==1.7.0 \
--hash=sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182 \
--hash=sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9
click==8.1.7 \
--hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de \
--hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28
colorama==0.4.6 ; platform_system == 'Windows' \
--hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
--hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
flask==3.0.2 \
--hash=sha256:822c03f4b799204250a7ee84b1eddc40665395333973dfb9deebfe425fefcb7d \
--hash=sha256:3232e0e9c850d781933cf0207523d1ece087eb8d87b23777ae38456e2fbe7c6e
itsdangerous==2.1.2 \
--hash=sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a \
--hash=sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44
jinja2==3.1.3 \
--hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 \
--hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa
markupsafe==2.1.5 \
--hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \
--hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \
--hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \
--hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \
--hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \
--hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \
--hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \
--hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \
--hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \
--hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \
--hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb
python-dotenv==1.0.1 \
--hash=sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca \
--hash=sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a
werkzeug==3.0.1 \
--hash=sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc \
--hash=sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10
----- stderr -----
Resolved 10 packages in [TIME]
"###);
Ok(())
}
#[test]
fn project_extra() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["typing-extensions"]
[project.optional-dependencies]
async = ["anyio==3.7.0"]
pytest = ["iniconfig"]
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"
"#,
)?;
context.lock().assert().success();
uv_snapshot!(context.filters(), context.export(), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated via `uv export`.
-e .
typing-extensions==4.10.0 \
--hash=sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb \
--hash=sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475
----- stderr -----
Resolved 6 packages in [TIME]
"###);
uv_snapshot!(context.filters(), context.export().arg("--extra").arg("pytest"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated via `uv export`.
-e .
iniconfig==2.0.0 \
--hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \
--hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374
typing-extensions==4.10.0 \
--hash=sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb \
--hash=sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475
----- stderr -----
Resolved 6 packages in [TIME]
"###);
uv_snapshot!(context.filters(), context.export().arg("--all-extras"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated via `uv export`.
-e .
anyio==3.7.0 \
--hash=sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce \
--hash=sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
iniconfig==2.0.0 \
--hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \
--hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374
sniffio==1.3.1 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2
typing-extensions==4.10.0 \
--hash=sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb \
--hash=sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475
----- stderr -----
Resolved 6 packages in [TIME]
"###);
Ok(())
}
#[test]
fn dependency_marker() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["anyio ; sys_platform == 'darwin'", "iniconfig"]
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"
"#,
)?;
context.lock().assert().success();
uv_snapshot!(context.filters(), context.export(), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated via `uv export`.
-e .
anyio==4.3.0 ; sys_platform == 'darwin' \
--hash=sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6 \
--hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8
idna==3.6 ; sys_platform == 'darwin' \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
iniconfig==2.0.0 \
--hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \
--hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374
sniffio==1.3.1 ; sys_platform == 'darwin' \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2
----- stderr -----
Resolved 5 packages in [TIME]
"###);
Ok(())
}
#[test]
fn dependency_multiple_markers() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.10"
dependencies = [
"trio ; python_version > '3.11'",
"trio ; sys_platform == 'win32'",
]
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"
"#,
)?;
context.lock().assert().success();
// Note that the `python_version > '3.11'` markers are normalized to `python_full_version >= '3.12'` in the output.
uv_snapshot!(context.filters(), context.export(), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated via `uv export`.
-e .
attrs==23.2.0 ; sys_platform == 'win32' or python_full_version >= '3.12' \
--hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \
--hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1
cffi==1.16.0 ; (implementation_name != 'pypy' and os_name == 'nt' and sys_platform == 'win32') or (python_full_version >= '3.12' and implementation_name != 'pypy' and os_name == 'nt') \
--hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \
--hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \
--hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \
--hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \
--hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \
--hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \
--hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \
--hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \
--hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \
--hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \
--hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \
--hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \
--hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \
--hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \
--hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \
--hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \
--hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \
--hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \
--hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \
--hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \
--hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \
--hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \
--hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \
--hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \
--hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \
--hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \
--hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \
--hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 \
--hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \
--hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \
--hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \
--hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \
--hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235
exceptiongroup==1.2.0 ; python_full_version < '3.11' and sys_platform == 'win32' \
--hash=sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68 \
--hash=sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14
idna==3.6 ; sys_platform == 'win32' or python_full_version >= '3.12' \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
outcome==1.3.0.post0 ; sys_platform == 'win32' or python_full_version >= '3.12' \
--hash=sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8 \
--hash=sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b
pycparser==2.21 ; (implementation_name != 'pypy' and os_name == 'nt' and sys_platform == 'win32') or (python_full_version >= '3.12' and implementation_name != 'pypy' and os_name == 'nt') \
--hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 \
--hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9
sniffio==1.3.1 ; sys_platform == 'win32' or python_full_version >= '3.12' \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2
sortedcontainers==2.4.0 ; sys_platform == 'win32' or python_full_version >= '3.12' \
--hash=sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88 \
--hash=sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0
trio==0.25.0 ; sys_platform == 'win32' or python_full_version >= '3.12' \
--hash=sha256:9b41f5993ad2c0e5f62d0acca320ec657fdb6b2a2c22b8c7aed6caf154475c4e \
--hash=sha256:e6458efe29cc543e557a91e614e2b51710eba2961669329ce9c862d50c6e8e81
----- stderr -----
Resolved 10 packages in [TIME]
"###);
Ok(())
}
#[test]
fn dependency_conflicting_markers() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = [
"trio==0.25.0 ; sys_platform == 'darwin'",
"trio==0.10.0 ; sys_platform == 'win32'",
]
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"
"#,
)?;
context.lock().assert().success();
uv_snapshot!(context.filters(), context.export(), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated via `uv export`.
-e .
async-generator==1.10 ; sys_platform == 'win32' \
--hash=sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144 \
--hash=sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b
attrs==23.2.0 ; sys_platform == 'darwin' or sys_platform == 'win32' \
--hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \
--hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1
cffi==1.16.0 ; (implementation_name != 'pypy' and os_name == 'nt' and sys_platform == 'darwin') or (os_name == 'nt' and sys_platform == 'win32') \
--hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \
--hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \
--hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \
--hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \
--hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \
--hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 \
--hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \
--hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \
--hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \
--hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \
--hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235
idna==3.6 ; sys_platform == 'darwin' or sys_platform == 'win32' \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
outcome==1.3.0.post0 ; sys_platform == 'darwin' or sys_platform == 'win32' \
--hash=sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8 \
--hash=sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b
pycparser==2.21 ; (implementation_name != 'pypy' and os_name == 'nt' and sys_platform == 'darwin') or (os_name == 'nt' and sys_platform == 'win32') \
--hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 \
--hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9
sniffio==1.3.1 ; sys_platform == 'darwin' or sys_platform == 'win32' \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2
sortedcontainers==2.4.0 ; sys_platform == 'darwin' or sys_platform == 'win32' \
--hash=sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88 \
--hash=sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0
trio==0.10.0 ; sys_platform == 'win32' \
--hash=sha256:d323cc15f6406d15954af91e5e34af2001cc24163fdde29e3f88a227a1b53ab0
trio==0.25.0 ; sys_platform == 'darwin' \
--hash=sha256:9b41f5993ad2c0e5f62d0acca320ec657fdb6b2a2c22b8c7aed6caf154475c4e \
--hash=sha256:e6458efe29cc543e557a91e614e2b51710eba2961669329ce9c862d50c6e8e81
----- stderr -----
Resolved 11 packages in [TIME]
"###);
Ok(())
}
#[test]
fn non_root() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["anyio==3.7.0", "child"]
[tool.uv.workspace]
members = ["child"]
[tool.uv.sources]
child = { workspace = true }
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"
"#,
)?;
let child = context.temp_dir.child("child");
child.child("pyproject.toml").write_str(
r#"
[project]
name = "child"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["iniconfig>=2"]
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"
"#,
)?;
context.lock().assert().success();
uv_snapshot!(context.filters(), context.export().arg("--package").arg("child"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated via `uv export`.
-e child
iniconfig==2.0.0 \
--hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \
--hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374
----- stderr -----
Resolved 6 packages in [TIME]
"###);
Ok(())
}
#[test]
fn relative_path() -> Result<()> {
let context = TestContext::new("3.12");
let dependency = context.temp_dir.child("dependency");
dependency.child("pyproject.toml").write_str(
r#"
[project]
name = "dependency"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["iniconfig>=2"]
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"
"#,
)?;
let project = context.temp_dir.child("project");
project.child("pyproject.toml").write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["dependency"]
[tool.uv.sources]
dependency = { path = "../dependency" }
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"
"#,
)?;
context.lock().current_dir(&project).assert().success();
// Pipe the output to requirements.txt.
let file = std::fs::File::create(project.child("requirements.txt")).unwrap();
uv_snapshot!(context.filters(), context.export().stdout(Stdio::from(file)).current_dir(&project), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Using Python 3.12.[X] interpreter at: [PYTHON-3.12]
Resolved 3 packages in [TIME]
"###);
// Read the file contents.
let contents = fs_err::read_to_string(project.child("requirements.txt")).unwrap();
insta::assert_snapshot!(contents, @r###"
# This file was autogenerated via `uv export`.
-e .
../dependency
iniconfig==2.0.0 \
--hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \
--hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374
"###);
// Install the dependencies.
uv_snapshot!(context.filters(), context.pip_install().arg("--requirement").arg("requirements.txt").current_dir(&project), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 3 packages in [TIME]
Prepared 3 packages in [TIME]
Installed 3 packages in [TIME]
+ dependency==0.1.0 (from file://[TEMP_DIR]/dependency)
+ iniconfig==2.0.0
+ project==0.1.0 (from file://[TEMP_DIR]/project)
"###);
Ok(())
}
#[test]
fn dev() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["typing-extensions"]
[tool.uv]
dev-dependencies = ["anyio"]
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"
"#,
)?;
context.lock().assert().success();
uv_snapshot!(context.filters(), context.export(), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated via `uv export`.
-e .
anyio==4.3.0 \
--hash=sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6 \
--hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
sniffio==1.3.1 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2
typing-extensions==4.10.0 \
--hash=sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb \
--hash=sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475
----- stderr -----
Resolved 5 packages in [TIME]
"###);
uv_snapshot!(context.filters(), context.export().arg("--no-dev"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated via `uv export`.
-e .
typing-extensions==4.10.0 \
--hash=sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb \
--hash=sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475
----- stderr -----
Resolved 5 packages in [TIME]
"###);
Ok(())
}


@ -22,6 +22,7 @@ fn help() {
remove Remove dependencies from the project
sync Update the project's environment
lock Update the project's lockfile
export Export the project's lockfile to an alternate format
tree Display the project's dependency tree
tool Run and install commands provided by Python packages
python Manage Python versions and installations
@ -85,6 +86,7 @@ fn help_flag() {
remove Remove dependencies from the project
sync Update the project's environment
lock Update the project's lockfile
export Export the project's lockfile to an alternate format
tree Display the project's dependency tree
tool Run and install commands provided by Python packages
python Manage Python versions and installations
@ -146,6 +148,7 @@ fn help_short_flag() {
remove Remove dependencies from the project
sync Update the project's environment
lock Update the project's lockfile
export Export the project's lockfile to an alternate format
tree Display the project's dependency tree
tool Run and install commands provided by Python packages
python Manage Python versions and installations
@ -624,6 +627,7 @@ fn help_unknown_subcommand() {
remove
sync
lock
export
tree
tool
python
@ -647,6 +651,7 @@ fn help_unknown_subcommand() {
remove
sync
lock
export
tree
tool
python
@ -697,6 +702,7 @@ fn help_with_global_option() {
remove Remove dependencies from the project
sync Update the project's environment
lock Update the project's lockfile
export Export the project's lockfile to an alternate format
tree Display the project's dependency tree
tool Run and install commands provided by Python packages
python Manage Python versions and installations
@ -796,6 +802,7 @@ fn help_with_no_pager() {
remove Remove dependencies from the project
sync Update the project's environment
lock Update the project's lockfile
export Export the project's lockfile to an alternate format
tree Display the project's dependency tree
tool Run and install commands provided by Python packages
python Manage Python versions and installations