initial import of the packaging package in the standard library

Tarek Ziade 2011-05-19 13:07:25 +02:00
parent 566f8a646e
commit 1231a4e097
193 changed files with 30376 additions and 149 deletions


@@ -0,0 +1,56 @@
"""Subpackage containing all standard commands."""
from packaging.errors import PackagingModuleError
from packaging.util import resolve_name
__all__ = ['get_command_names', 'set_command', 'get_command_class',
'STANDARD_COMMANDS']
_COMMANDS = {
'check': 'packaging.command.check.check',
'test': 'packaging.command.test.test',
'build': 'packaging.command.build.build',
'build_py': 'packaging.command.build_py.build_py',
'build_ext': 'packaging.command.build_ext.build_ext',
'build_clib': 'packaging.command.build_clib.build_clib',
'build_scripts': 'packaging.command.build_scripts.build_scripts',
'clean': 'packaging.command.clean.clean',
'install_dist': 'packaging.command.install_dist.install_dist',
'install_lib': 'packaging.command.install_lib.install_lib',
'install_headers': 'packaging.command.install_headers.install_headers',
'install_scripts': 'packaging.command.install_scripts.install_scripts',
'install_data': 'packaging.command.install_data.install_data',
'install_distinfo':
'packaging.command.install_distinfo.install_distinfo',
'sdist': 'packaging.command.sdist.sdist',
'bdist': 'packaging.command.bdist.bdist',
'bdist_dumb': 'packaging.command.bdist_dumb.bdist_dumb',
'bdist_wininst': 'packaging.command.bdist_wininst.bdist_wininst',
'register': 'packaging.command.register.register',
'upload': 'packaging.command.upload.upload',
'upload_docs': 'packaging.command.upload_docs.upload_docs'}
STANDARD_COMMANDS = set(_COMMANDS)
def get_command_names():
"""Return registered commands"""
return sorted(_COMMANDS)
def set_command(location):
cls = resolve_name(location)
# XXX we want to do the duck-type checking here
_COMMANDS[cls.get_command_name()] = cls
def get_command_class(name):
"""Return the registered command"""
try:
cls = _COMMANDS[name]
if isinstance(cls, str):
cls = resolve_name(cls)
_COMMANDS[name] = cls
return cls
except KeyError:
raise PackagingModuleError("Invalid command %s" % name)
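The module above is a lazy registry: command names map to dotted paths that are only resolved into classes on first lookup. As a rough, hypothetical usage sketch (not part of this commit; the third-party path is invented):

# Illustrative only -- assumes the packaging.command registry defined above.
from packaging.command import get_command_class, get_command_names, set_command

build_cls = get_command_class('build')   # dotted path resolved and cached here
print(get_command_names())               # sorted list of registered command names
# A third-party command could be added with set_command('mypkg.commands.foo'),
# which resolves the (invented) path and keys the class by its command name.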


@@ -0,0 +1,141 @@
"""Create a built (binary) distribution.
If a --formats option was given on the command line, this command will
call the corresponding bdist_* commands; if the option was absent, a
bdist_* command depending on the current platform will be called.
"""
import os
from packaging import util
from packaging.command.cmd import Command
from packaging.errors import PackagingPlatformError, PackagingOptionError
def show_formats():
"""Print list of available formats (arguments to "--format" option).
"""
from packaging.fancy_getopt import FancyGetopt
formats = []
for format in bdist.format_commands:
formats.append(("formats=" + format, None,
bdist.format_command[format][1]))
pretty_printer = FancyGetopt(formats)
pretty_printer.print_help("List of available distribution formats:")
class bdist(Command):
description = "create a built (binary) distribution"
user_options = [('bdist-base=', 'b',
"temporary directory for creating built distributions"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % util.get_platform()),
('formats=', None,
"formats for distribution (comma-separated list)"),
('dist-dir=', 'd',
"directory to put final built distributions in "
"[default: dist]"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
('owner=', 'u',
"Owner name used when creating a tar file"
" [default: current user]"),
('group=', 'g',
"Group name used when creating a tar file"
" [default: current group]"),
]
boolean_options = ['skip-build']
help_options = [
('help-formats', None,
"lists available distribution formats", show_formats),
]
# This is of course very simplistic. The various UNIX family operating
# systems have their specific formats, but they are out of scope for us;
# bdist_dumb is, well, dumb; it's more a building block for other
# packaging tools than a real end-user binary format.
default_format = {'posix': 'gztar',
'nt': 'zip',
'os2': 'zip'}
# Establish the preferred order (for the --help-formats option).
format_commands = ['gztar', 'bztar', 'ztar', 'tar',
'wininst', 'zip', 'msi']
# And the real information.
format_command = {'gztar': ('bdist_dumb', "gzip'ed tar file"),
'bztar': ('bdist_dumb', "bzip2'ed tar file"),
'ztar': ('bdist_dumb', "compressed tar file"),
'tar': ('bdist_dumb', "tar file"),
'wininst': ('bdist_wininst',
"Windows executable installer"),
'zip': ('bdist_dumb', "ZIP file"),
'msi': ('bdist_msi', "Microsoft Installer")
}
def initialize_options(self):
self.bdist_base = None
self.plat_name = None
self.formats = None
self.dist_dir = None
self.skip_build = False
self.group = None
self.owner = None
def finalize_options(self):
# have to finalize 'plat_name' before 'bdist_base'
if self.plat_name is None:
if self.skip_build:
self.plat_name = util.get_platform()
else:
self.plat_name = self.get_finalized_command('build').plat_name
# 'bdist_base' -- parent of per-built-distribution-format
# temporary directories (eg. we'll probably have
# "build/bdist.<plat>/dumb", etc.)
if self.bdist_base is None:
build_base = self.get_finalized_command('build').build_base
self.bdist_base = os.path.join(build_base,
'bdist.' + self.plat_name)
self.ensure_string_list('formats')
if self.formats is None:
try:
self.formats = [self.default_format[os.name]]
except KeyError:
raise PackagingPlatformError("don't know how to create built distributions " + \
"on platform %s" % os.name)
if self.dist_dir is None:
self.dist_dir = "dist"
def run(self):
# Figure out which sub-commands we need to run.
commands = []
for format in self.formats:
try:
commands.append(self.format_command[format][0])
except KeyError:
raise PackagingOptionError("invalid format '%s'" % format)
# Reinitialize and run each command.
for i in range(len(self.formats)):
cmd_name = commands[i]
sub_cmd = self.get_reinitialized_command(cmd_name)
# passing the owner and group names for tar archiving
if cmd_name == 'bdist_dumb':
sub_cmd.owner = self.owner
sub_cmd.group = self.group
# If we're going to need to run this command again, tell it to
# keep its temporary files around so subsequent runs go faster.
if cmd_name in commands[i+1:]:
sub_cmd.keep_temp = True
self.run_command(cmd_name)
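As the module docstring explains, the --formats option selects which bdist_* sub-commands run; the mapping lives in the format_command class attribute and is looked up in run(). A small sketch of that lookup, with an invented formats list:

# Sketch of the format -> sub-command resolution performed in bdist.run().
from packaging.command.bdist import bdist

formats = ['gztar', 'zip', 'msi']    # as if passed via --formats
commands = [bdist.format_command[fmt][0] for fmt in formats]
print(commands)                      # ['bdist_dumb', 'bdist_dumb', 'bdist_msi']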


@@ -0,0 +1,137 @@
"""Create a "dumb" built distribution.
A dumb distribution is just an archive meant to be unpacked under
sys.prefix or sys.exec_prefix.
"""
import os
from shutil import rmtree
from sysconfig import get_python_version
from packaging.util import get_platform
from packaging.command.cmd import Command
from packaging.errors import PackagingPlatformError
from packaging import logger
class bdist_dumb(Command):
description = 'create a "dumb" built distribution'
user_options = [('bdist-dir=', 'd',
"temporary directory for creating the distribution"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % get_platform()),
('format=', 'f',
"archive format to create (tar, ztar, gztar, zip)"),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
('relative', None,
"build the archive using relative paths"
"(default: false)"),
('owner=', 'u',
"Owner name used when creating a tar file"
" [default: current user]"),
('group=', 'g',
"Group name used when creating a tar file"
" [default: current group]"),
]
boolean_options = ['keep-temp', 'skip-build', 'relative']
default_format = { 'posix': 'gztar',
'nt': 'zip',
'os2': 'zip' }
def initialize_options(self):
self.bdist_dir = None
self.plat_name = None
self.format = None
self.keep_temp = False
self.dist_dir = None
self.skip_build = False
self.relative = False
self.owner = None
self.group = None
def finalize_options(self):
if self.bdist_dir is None:
bdist_base = self.get_finalized_command('bdist').bdist_base
self.bdist_dir = os.path.join(bdist_base, 'dumb')
if self.format is None:
try:
self.format = self.default_format[os.name]
except KeyError:
raise PackagingPlatformError(("don't know how to create dumb built distributions " +
"on platform %s") % os.name)
self.set_undefined_options('bdist', 'dist_dir', 'plat_name')
def run(self):
if not self.skip_build:
self.run_command('build')
install = self.get_reinitialized_command('install_dist',
reinit_subcommands=True)
install.root = self.bdist_dir
install.skip_build = self.skip_build
install.warn_dir = False
logger.info("installing to %s", self.bdist_dir)
self.run_command('install_dist')
# And make an archive relative to the root of the
# pseudo-installation tree.
archive_basename = "%s.%s" % (self.distribution.get_fullname(),
self.plat_name)
# OS/2 objects to any ":" characters in a filename (such as when
# a timestamp is used in a version) so change them to hyphens.
if os.name == "os2":
archive_basename = archive_basename.replace(":", "-")
pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
if not self.relative:
archive_root = self.bdist_dir
else:
if (self.distribution.has_ext_modules() and
(install.install_base != install.install_platbase)):
raise PackagingPlatformError(
"can't make a dumb built distribution where base and "
"platbase are different (%r, %r)" %
(install.install_base, install.install_platbase))
else:
archive_root = os.path.join(
self.bdist_dir,
self._ensure_relative(install.install_base))
# Make the archive
filename = self.make_archive(pseudoinstall_root,
self.format, root_dir=archive_root,
owner=self.owner, group=self.group)
if self.distribution.has_ext_modules():
pyversion = get_python_version()
else:
pyversion = 'any'
self.distribution.dist_files.append(('bdist_dumb', pyversion,
filename))
if not self.keep_temp:
if self.dry_run:
logger.info('removing %s', self.bdist_dir)
else:
rmtree(self.bdist_dir)
def _ensure_relative(self, path):
        # helper copied from the old distutils dir_util module (removed in packaging)
drive, path = os.path.splitdrive(path)
if path[0:1] == os.sep:
path = drive + path[1:]
return path
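For the --relative case, _ensure_relative() strips a leading separator (re-attaching any drive prefix) so that the install base can be joined under bdist_dir. A standalone sketch of that behaviour, using a POSIX path as the example input:

# Standalone copy of the helper above, shown with an example input.
import os

def ensure_relative(path):
    drive, path = os.path.splitdrive(path)
    if path[0:1] == os.sep:
        path = drive + path[1:]
    return path

print(ensure_relative('/usr/local/lib'))   # 'usr/local/lib' on POSIX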


@@ -0,0 +1,740 @@
"""Create a Microsoft Installer (.msi) binary distribution."""
# Copyright (C) 2005, 2006 Martin von Löwis
# Licensed to PSF under a Contributor Agreement.
import sys
import os
import msilib
from sysconfig import get_python_version
from shutil import rmtree
from packaging.command.cmd import Command
from packaging.version import NormalizedVersion
from packaging.errors import PackagingOptionError
from packaging import logger as log
from packaging.util import get_platform
from msilib import schema, sequence, text
from msilib import Directory, Feature, Dialog, add_data
class MSIVersion(NormalizedVersion):
"""
MSI ProductVersion must be strictly numeric.
MSIVersion disallows prerelease and postrelease versions.
"""
def __init__(self, *args, **kwargs):
super(MSIVersion, self).__init__(*args, **kwargs)
if not self.is_final:
raise ValueError("ProductVersion must be strictly numeric")
class PyDialog(Dialog):
"""Dialog class with a fixed layout: controls at the top, then a ruler,
then a list of buttons: back, next, cancel. Optionally a bitmap at the
left."""
def __init__(self, *args, **kw):
"""Dialog(database, name, x, y, w, h, attributes, title, first,
default, cancel, bitmap=true)"""
Dialog.__init__(self, *args)
ruler = self.h - 36
#if kw.get("bitmap", True):
# self.bitmap("Bitmap", 0, 0, bmwidth, ruler, "PythonWin")
self.line("BottomLine", 0, ruler, self.w, 0)
def title(self, title):
"Set the title text of the dialog at the top."
# name, x, y, w, h, flags=Visible|Enabled|Transparent|NoPrefix,
# text, in VerdanaBold10
self.text("Title", 15, 10, 320, 60, 0x30003,
r"{\VerdanaBold10}%s" % title)
def back(self, title, next, name = "Back", active = 1):
"""Add a back button with a given title, the tab-next button,
its name in the Control table, possibly initially disabled.
Return the button, so that events can be associated"""
if active:
flags = 3 # Visible|Enabled
else:
flags = 1 # Visible
return self.pushbutton(name, 180, self.h-27 , 56, 17, flags, title, next)
def cancel(self, title, next, name = "Cancel", active = 1):
"""Add a cancel button with a given title, the tab-next button,
its name in the Control table, possibly initially disabled.
Return the button, so that events can be associated"""
if active:
flags = 3 # Visible|Enabled
else:
flags = 1 # Visible
return self.pushbutton(name, 304, self.h-27, 56, 17, flags, title, next)
def next(self, title, next, name = "Next", active = 1):
"""Add a Next button with a given title, the tab-next button,
its name in the Control table, possibly initially disabled.
Return the button, so that events can be associated"""
if active:
flags = 3 # Visible|Enabled
else:
flags = 1 # Visible
return self.pushbutton(name, 236, self.h-27, 56, 17, flags, title, next)
def xbutton(self, name, title, next, xpos):
"""Add a button with a given title, the tab-next button,
its name in the Control table, giving its x position; the
y-position is aligned with the other buttons.
Return the button, so that events can be associated"""
return self.pushbutton(name, int(self.w*xpos - 28), self.h-27, 56, 17, 3, title, next)
class bdist_msi(Command):
description = "create a Microsoft Installer (.msi) binary distribution"
user_options = [('bdist-dir=', None,
"temporary directory for creating the distribution"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % get_platform()),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('target-version=', None,
"require a specific python version" +
" on the target system"),
('no-target-compile', 'c',
"do not compile .py to .pyc on the target system"),
('no-target-optimize', 'o',
"do not compile .py to .pyo (optimized)"
"on the target system"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
('install-script=', None,
"basename of installation script to be run after"
"installation or before deinstallation"),
('pre-install-script=', None,
"Fully qualified filename of a script to be run before "
"any files are installed. This script need not be in the "
"distribution"),
]
boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
'skip-build']
all_versions = ['2.0', '2.1', '2.2', '2.3', '2.4',
'2.5', '2.6', '2.7', '2.8', '2.9',
'3.0', '3.1', '3.2', '3.3', '3.4',
'3.5', '3.6', '3.7', '3.8', '3.9']
other_version = 'X'
def initialize_options(self):
self.bdist_dir = None
self.plat_name = None
self.keep_temp = False
self.no_target_compile = False
self.no_target_optimize = False
self.target_version = None
self.dist_dir = None
self.skip_build = False
self.install_script = None
self.pre_install_script = None
self.versions = None
def finalize_options(self):
if self.bdist_dir is None:
bdist_base = self.get_finalized_command('bdist').bdist_base
self.bdist_dir = os.path.join(bdist_base, 'msi')
short_version = get_python_version()
if (not self.target_version) and self.distribution.has_ext_modules():
self.target_version = short_version
if self.target_version:
self.versions = [self.target_version]
if not self.skip_build and self.distribution.has_ext_modules()\
and self.target_version != short_version:
raise PackagingOptionError("target version can only be %s, or the '--skip-build'" \
" option must be specified" % (short_version,))
else:
self.versions = list(self.all_versions)
self.set_undefined_options('bdist', 'dist_dir', 'plat_name')
if self.pre_install_script:
raise PackagingOptionError("the pre-install-script feature is not yet implemented")
if self.install_script:
for script in self.distribution.scripts:
if self.install_script == os.path.basename(script):
break
else:
raise PackagingOptionError("install_script '%s' not found in scripts" % \
self.install_script)
self.install_script_key = None
def run(self):
if not self.skip_build:
self.run_command('build')
install = self.get_reinitialized_command('install_dist',
reinit_subcommands=True)
install.prefix = self.bdist_dir
install.skip_build = self.skip_build
install.warn_dir = False
install_lib = self.get_reinitialized_command('install_lib')
# we do not want to include pyc or pyo files
install_lib.compile = False
install_lib.optimize = 0
if self.distribution.has_ext_modules():
# If we are building an installer for a Python version other
# than the one we are currently running, then we need to ensure
# our build_lib reflects the other Python version rather than ours.
# Note that for target_version!=sys.version, we must have skipped the
# build step, so there is no issue with enforcing the build of this
# version.
target_version = self.target_version
if not target_version:
assert self.skip_build, "Should have already checked this"
target_version = sys.version[0:3]
plat_specifier = ".%s-%s" % (self.plat_name, target_version)
build = self.get_finalized_command('build')
build.build_lib = os.path.join(build.build_base,
'lib' + plat_specifier)
log.info("installing to %s", self.bdist_dir)
install.ensure_finalized()
# avoid warning of 'install_lib' about installing
# into a directory not in sys.path
sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
install.run()
del sys.path[0]
self.mkpath(self.dist_dir)
fullname = self.distribution.get_fullname()
installer_name = self.get_installer_filename(fullname)
installer_name = os.path.abspath(installer_name)
        if os.path.exists(installer_name):
            os.unlink(installer_name)
metadata = self.distribution.metadata
author = metadata.author
if not author:
author = metadata.maintainer
if not author:
author = "UNKNOWN"
version = MSIVersion(metadata.get_version())
# Prefix ProductName with Python x.y, so that
# it sorts together with the other Python packages
        # in Add/Remove Programs (ARP)
fullname = self.distribution.get_fullname()
if self.target_version:
product_name = "Python %s %s" % (self.target_version, fullname)
else:
product_name = "Python %s" % (fullname)
self.db = msilib.init_database(installer_name, schema,
product_name, msilib.gen_uuid(),
str(version), author)
msilib.add_tables(self.db, sequence)
props = [('DistVersion', version)]
email = metadata.author_email or metadata.maintainer_email
if email:
props.append(("ARPCONTACT", email))
if metadata.url:
props.append(("ARPURLINFOABOUT", metadata.url))
if props:
add_data(self.db, 'Property', props)
self.add_find_python()
self.add_files()
self.add_scripts()
self.add_ui()
self.db.Commit()
if hasattr(self.distribution, 'dist_files'):
tup = 'bdist_msi', self.target_version or 'any', fullname
self.distribution.dist_files.append(tup)
if not self.keep_temp:
log.info("removing temporary build directory %s", self.bdist_dir)
if not self.dry_run:
rmtree(self.bdist_dir)
def add_files(self):
db = self.db
cab = msilib.CAB("distfiles")
rootdir = os.path.abspath(self.bdist_dir)
root = Directory(db, cab, None, rootdir, "TARGETDIR", "SourceDir")
f = Feature(db, "Python", "Python", "Everything",
0, 1, directory="TARGETDIR")
items = [(f, root, '')]
for version in self.versions + [self.other_version]:
target = "TARGETDIR" + version
name = default = "Python" + version
desc = "Everything"
if version is self.other_version:
title = "Python from another location"
level = 2
else:
title = "Python %s from registry" % version
level = 1
f = Feature(db, name, title, desc, 1, level, directory=target)
dir = Directory(db, cab, root, rootdir, target, default)
items.append((f, dir, version))
db.Commit()
seen = {}
for feature, dir, version in items:
todo = [dir]
while todo:
dir = todo.pop()
for file in os.listdir(dir.absolute):
afile = os.path.join(dir.absolute, file)
if os.path.isdir(afile):
short = "%s|%s" % (dir.make_short(file), file)
default = file + version
newdir = Directory(db, cab, dir, file, default, short)
todo.append(newdir)
else:
if not dir.component:
dir.start_component(dir.logical, feature, 0)
if afile not in seen:
key = seen[afile] = dir.add_file(file)
if file==self.install_script:
if self.install_script_key:
raise PackagingOptionError(
"Multiple files with name %s" % file)
self.install_script_key = '[#%s]' % key
else:
key = seen[afile]
add_data(self.db, "DuplicateFile",
[(key + version, dir.component, key, None, dir.logical)])
db.Commit()
cab.commit(db)
def add_find_python(self):
"""Adds code to the installer to compute the location of Python.
Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the
registry for each version of Python.
Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined,
else from PYTHON.MACHINE.X.Y.
Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe"""
start = 402
for ver in self.versions:
install_path = r"SOFTWARE\Python\PythonCore\%s\InstallPath" % ver
machine_reg = "python.machine." + ver
user_reg = "python.user." + ver
machine_prop = "PYTHON.MACHINE." + ver
user_prop = "PYTHON.USER." + ver
machine_action = "PythonFromMachine" + ver
user_action = "PythonFromUser" + ver
exe_action = "PythonExe" + ver
target_dir_prop = "TARGETDIR" + ver
exe_prop = "PYTHON" + ver
if msilib.Win64:
# type: msidbLocatorTypeRawValue + msidbLocatorType64bit
Type = 2+16
else:
Type = 2
add_data(self.db, "RegLocator",
[(machine_reg, 2, install_path, None, Type),
(user_reg, 1, install_path, None, Type)])
add_data(self.db, "AppSearch",
[(machine_prop, machine_reg),
(user_prop, user_reg)])
add_data(self.db, "CustomAction",
[(machine_action, 51+256, target_dir_prop, "[" + machine_prop + "]"),
(user_action, 51+256, target_dir_prop, "[" + user_prop + "]"),
(exe_action, 51+256, exe_prop, "[" + target_dir_prop + "]\\python.exe"),
])
add_data(self.db, "InstallExecuteSequence",
[(machine_action, machine_prop, start),
(user_action, user_prop, start + 1),
(exe_action, None, start + 2),
])
add_data(self.db, "InstallUISequence",
[(machine_action, machine_prop, start),
(user_action, user_prop, start + 1),
(exe_action, None, start + 2),
])
add_data(self.db, "Condition",
[("Python" + ver, 0, "NOT TARGETDIR" + ver)])
start += 4
assert start < 500
def add_scripts(self):
if self.install_script:
start = 6800
for ver in self.versions + [self.other_version]:
install_action = "install_script." + ver
exe_prop = "PYTHON" + ver
add_data(self.db, "CustomAction",
[(install_action, 50, exe_prop, self.install_script_key)])
add_data(self.db, "InstallExecuteSequence",
[(install_action, "&Python%s=3" % ver, start)])
start += 1
# XXX pre-install scripts are currently refused in finalize_options()
# but if this feature is completed, it will also need to add
# entries for each version as the above code does
if self.pre_install_script:
scriptfn = os.path.join(self.bdist_dir, "preinstall.bat")
with open(scriptfn, "w") as f:
# The batch file will be executed with [PYTHON], so that %1
# is the path to the Python interpreter; %0 will be the path
# of the batch file.
# rem ="""
# %1 %0
# exit
# """
# <actual script>
f.write('rem ="""\n%1 %0\nexit\n"""\n')
with open(self.pre_install_script) as fp:
f.write(fp.read())
add_data(self.db, "Binary",
[("PreInstall", msilib.Binary(scriptfn)),
])
add_data(self.db, "CustomAction",
[("PreInstall", 2, "PreInstall", None),
])
add_data(self.db, "InstallExecuteSequence",
[("PreInstall", "NOT Installed", 450),
])
def add_ui(self):
db = self.db
x = y = 50
w = 370
h = 300
title = "[ProductName] Setup"
# see "Dialog Style Bits"
modal = 3 # visible | modal
modeless = 1 # visible
# UI customization properties
add_data(db, "Property",
# See "DefaultUIFont Property"
[("DefaultUIFont", "DlgFont8"),
# See "ErrorDialog Style Bit"
("ErrorDialog", "ErrorDlg"),
("Progress1", "Install"), # modified in maintenance type dlg
("Progress2", "installs"),
("MaintenanceForm_Action", "Repair"),
# possible values: ALL, JUSTME
("WhichUsers", "ALL")
])
# Fonts, see "TextStyle Table"
add_data(db, "TextStyle",
[("DlgFont8", "Tahoma", 9, None, 0),
("DlgFontBold8", "Tahoma", 8, None, 1), #bold
("VerdanaBold10", "Verdana", 10, None, 1),
("VerdanaRed9", "Verdana", 9, 255, 0),
])
# UI Sequences, see "InstallUISequence Table", "Using a Sequence Table"
        # Numbers indicate sequence; see sequence.py for how these actions integrate
add_data(db, "InstallUISequence",
[("PrepareDlg", "Not Privileged or Windows9x or Installed", 140),
("WhichUsersDlg", "Privileged and not Windows9x and not Installed", 141),
# In the user interface, assume all-users installation if privileged.
("SelectFeaturesDlg", "Not Installed", 1230),
# XXX no support for resume installations yet
#("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240),
("MaintenanceTypeDlg", "Installed AND NOT RESUME AND NOT Preselected", 1250),
("ProgressDlg", None, 1280)])
add_data(db, 'ActionText', text.ActionText)
add_data(db, 'UIText', text.UIText)
#####################################################################
# Standard dialogs: FatalError, UserExit, ExitDialog
fatal=PyDialog(db, "FatalError", x, y, w, h, modal, title,
"Finish", "Finish", "Finish")
fatal.title("[ProductName] Installer ended prematurely")
fatal.back("< Back", "Finish", active = 0)
fatal.cancel("Cancel", "Back", active = 0)
fatal.text("Description1", 15, 70, 320, 80, 0x30003,
"[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.")
fatal.text("Description2", 15, 155, 320, 20, 0x30003,
"Click the Finish button to exit the Installer.")
c=fatal.next("Finish", "Cancel", name="Finish")
c.event("EndDialog", "Exit")
user_exit=PyDialog(db, "UserExit", x, y, w, h, modal, title,
"Finish", "Finish", "Finish")
user_exit.title("[ProductName] Installer was interrupted")
user_exit.back("< Back", "Finish", active = 0)
user_exit.cancel("Cancel", "Back", active = 0)
user_exit.text("Description1", 15, 70, 320, 80, 0x30003,
"[ProductName] setup was interrupted. Your system has not been modified. "
"To install this program at a later time, please run the installation again.")
user_exit.text("Description2", 15, 155, 320, 20, 0x30003,
"Click the Finish button to exit the Installer.")
c = user_exit.next("Finish", "Cancel", name="Finish")
c.event("EndDialog", "Exit")
exit_dialog = PyDialog(db, "ExitDialog", x, y, w, h, modal, title,
"Finish", "Finish", "Finish")
exit_dialog.title("Completing the [ProductName] Installer")
exit_dialog.back("< Back", "Finish", active = 0)
exit_dialog.cancel("Cancel", "Back", active = 0)
exit_dialog.text("Description", 15, 235, 320, 20, 0x30003,
"Click the Finish button to exit the Installer.")
c = exit_dialog.next("Finish", "Cancel", name="Finish")
c.event("EndDialog", "Return")
#####################################################################
# Required dialog: FilesInUse, ErrorDlg
inuse = PyDialog(db, "FilesInUse",
x, y, w, h,
19, # KeepModeless|Modal|Visible
title,
"Retry", "Retry", "Retry", bitmap=False)
inuse.text("Title", 15, 6, 200, 15, 0x30003,
r"{\DlgFontBold8}Files in Use")
inuse.text("Description", 20, 23, 280, 20, 0x30003,
"Some files that need to be updated are currently in use.")
inuse.text("Text", 20, 55, 330, 50, 3,
"The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.")
inuse.control("List", "ListBox", 20, 107, 330, 130, 7, "FileInUseProcess",
None, None, None)
c=inuse.back("Exit", "Ignore", name="Exit")
c.event("EndDialog", "Exit")
c=inuse.next("Ignore", "Retry", name="Ignore")
c.event("EndDialog", "Ignore")
c=inuse.cancel("Retry", "Exit", name="Retry")
c.event("EndDialog","Retry")
# See "Error Dialog". See "ICE20" for the required names of the controls.
error = Dialog(db, "ErrorDlg",
50, 10, 330, 101,
65543, # Error|Minimize|Modal|Visible
title,
"ErrorText", None, None)
error.text("ErrorText", 50,9,280,48,3, "")
#error.control("ErrorIcon", "Icon", 15, 9, 24, 24, 5242881, None, "py.ico", None, None)
error.pushbutton("N",120,72,81,21,3,"No",None).event("EndDialog","ErrorNo")
error.pushbutton("Y",240,72,81,21,3,"Yes",None).event("EndDialog","ErrorYes")
error.pushbutton("A",0,72,81,21,3,"Abort",None).event("EndDialog","ErrorAbort")
error.pushbutton("C",42,72,81,21,3,"Cancel",None).event("EndDialog","ErrorCancel")
error.pushbutton("I",81,72,81,21,3,"Ignore",None).event("EndDialog","ErrorIgnore")
error.pushbutton("O",159,72,81,21,3,"Ok",None).event("EndDialog","ErrorOk")
error.pushbutton("R",198,72,81,21,3,"Retry",None).event("EndDialog","ErrorRetry")
#####################################################################
# Global "Query Cancel" dialog
cancel = Dialog(db, "CancelDlg", 50, 10, 260, 85, 3, title,
"No", "No", "No")
cancel.text("Text", 48, 15, 194, 30, 3,
"Are you sure you want to cancel [ProductName] installation?")
#cancel.control("Icon", "Icon", 15, 15, 24, 24, 5242881, None,
# "py.ico", None, None)
c=cancel.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No")
c.event("EndDialog", "Exit")
c=cancel.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes")
c.event("EndDialog", "Return")
#####################################################################
# Global "Wait for costing" dialog
costing = Dialog(db, "WaitForCostingDlg", 50, 10, 260, 85, modal, title,
"Return", "Return", "Return")
costing.text("Text", 48, 15, 194, 30, 3,
"Please wait while the installer finishes determining your disk space requirements.")
c = costing.pushbutton("Return", 102, 57, 56, 17, 3, "Return", None)
c.event("EndDialog", "Exit")
#####################################################################
# Preparation dialog: no user input except cancellation
prep = PyDialog(db, "PrepareDlg", x, y, w, h, modeless, title,
"Cancel", "Cancel", "Cancel")
prep.text("Description", 15, 70, 320, 40, 0x30003,
"Please wait while the Installer prepares to guide you through the installation.")
prep.title("Welcome to the [ProductName] Installer")
c=prep.text("ActionText", 15, 110, 320, 20, 0x30003, "Pondering...")
c.mapping("ActionText", "Text")
c=prep.text("ActionData", 15, 135, 320, 30, 0x30003, None)
c.mapping("ActionData", "Text")
prep.back("Back", None, active=0)
prep.next("Next", None, active=0)
c=prep.cancel("Cancel", None)
c.event("SpawnDialog", "CancelDlg")
#####################################################################
# Feature (Python directory) selection
seldlg = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal, title,
"Next", "Next", "Cancel")
seldlg.title("Select Python Installations")
seldlg.text("Hint", 15, 30, 300, 20, 3,
"Select the Python locations where %s should be installed."
% self.distribution.get_fullname())
seldlg.back("< Back", None, active=0)
c = seldlg.next("Next >", "Cancel")
order = 1
c.event("[TARGETDIR]", "[SourceDir]", ordering=order)
for version in self.versions + [self.other_version]:
order += 1
c.event("[TARGETDIR]", "[TARGETDIR%s]" % version,
"FEATURE_SELECTED AND &Python%s=3" % version,
ordering=order)
c.event("SpawnWaitDialog", "WaitForCostingDlg", ordering=order + 1)
c.event("EndDialog", "Return", ordering=order + 2)
c = seldlg.cancel("Cancel", "Features")
c.event("SpawnDialog", "CancelDlg")
c = seldlg.control("Features", "SelectionTree", 15, 60, 300, 120, 3,
"FEATURE", None, "PathEdit", None)
c.event("[FEATURE_SELECTED]", "1")
ver = self.other_version
install_other_cond = "FEATURE_SELECTED AND &Python%s=3" % ver
dont_install_other_cond = "FEATURE_SELECTED AND &Python%s<>3" % ver
c = seldlg.text("Other", 15, 200, 300, 15, 3,
"Provide an alternate Python location")
c.condition("Enable", install_other_cond)
c.condition("Show", install_other_cond)
c.condition("Disable", dont_install_other_cond)
c.condition("Hide", dont_install_other_cond)
c = seldlg.control("PathEdit", "PathEdit", 15, 215, 300, 16, 1,
"TARGETDIR" + ver, None, "Next", None)
c.condition("Enable", install_other_cond)
c.condition("Show", install_other_cond)
c.condition("Disable", dont_install_other_cond)
c.condition("Hide", dont_install_other_cond)
#####################################################################
# Disk cost
cost = PyDialog(db, "DiskCostDlg", x, y, w, h, modal, title,
"OK", "OK", "OK", bitmap=False)
cost.text("Title", 15, 6, 200, 15, 0x30003,
"{\DlgFontBold8}Disk Space Requirements")
cost.text("Description", 20, 20, 280, 20, 0x30003,
"The disk space required for the installation of the selected features.")
cost.text("Text", 20, 53, 330, 60, 3,
"The highlighted volumes (if any) do not have enough disk space "
"available for the currently selected features. You can either "
"remove some files from the highlighted volumes, or choose to "
"install less features onto local drive(s), or select different "
"destination drive(s).")
cost.control("VolumeList", "VolumeCostList", 20, 100, 330, 150, 393223,
None, "{120}{70}{70}{70}{70}", None, None)
cost.xbutton("OK", "Ok", None, 0.5).event("EndDialog", "Return")
#####################################################################
# WhichUsers Dialog. Only available on NT, and for privileged users.
# This must be run before FindRelatedProducts, because that will
# take into account whether the previous installation was per-user
# or per-machine. We currently don't support going back to this
# dialog after "Next" was selected; to support this, we would need to
# find how to reset the ALLUSERS property, and how to re-run
# FindRelatedProducts.
# On Windows9x, the ALLUSERS property is ignored on the command line
        # and in the Property table, but according to the documentation the
        # installer fails if a dialog attempts to set ALLUSERS.
whichusers = PyDialog(db, "WhichUsersDlg", x, y, w, h, modal, title,
"AdminInstall", "Next", "Cancel")
whichusers.title("Select whether to install [ProductName] for all users of this computer.")
# A radio group with two options: allusers, justme
g = whichusers.radiogroup("AdminInstall", 15, 60, 260, 50, 3,
"WhichUsers", "", "Next")
g.add("ALL", 0, 5, 150, 20, "Install for all users")
g.add("JUSTME", 0, 25, 150, 20, "Install just for me")
whichusers.back("Back", None, active=0)
c = whichusers.next("Next >", "Cancel")
c.event("[ALLUSERS]", "1", 'WhichUsers="ALL"', 1)
c.event("EndDialog", "Return", ordering = 2)
c = whichusers.cancel("Cancel", "AdminInstall")
c.event("SpawnDialog", "CancelDlg")
#####################################################################
# Installation Progress dialog (modeless)
progress = PyDialog(db, "ProgressDlg", x, y, w, h, modeless, title,
"Cancel", "Cancel", "Cancel", bitmap=False)
progress.text("Title", 20, 15, 200, 15, 0x30003,
"{\DlgFontBold8}[Progress1] [ProductName]")
progress.text("Text", 35, 65, 300, 30, 3,
"Please wait while the Installer [Progress2] [ProductName]. "
"This may take several minutes.")
progress.text("StatusLabel", 35, 100, 35, 20, 3, "Status:")
c=progress.text("ActionText", 70, 100, w-70, 20, 3, "Pondering...")
c.mapping("ActionText", "Text")
#c=progress.text("ActionData", 35, 140, 300, 20, 3, None)
#c.mapping("ActionData", "Text")
c=progress.control("ProgressBar", "ProgressBar", 35, 120, 300, 10, 65537,
None, "Progress done", None, None)
c.mapping("SetProgress", "Progress")
progress.back("< Back", "Next", active=False)
progress.next("Next >", "Cancel", active=False)
progress.cancel("Cancel", "Back").event("SpawnDialog", "CancelDlg")
###################################################################
# Maintenance type: repair/uninstall
maint = PyDialog(db, "MaintenanceTypeDlg", x, y, w, h, modal, title,
"Next", "Next", "Cancel")
maint.title("Welcome to the [ProductName] Setup Wizard")
maint.text("BodyText", 15, 63, 330, 42, 3,
"Select whether you want to repair or remove [ProductName].")
g=maint.radiogroup("RepairRadioGroup", 15, 108, 330, 60, 3,
"MaintenanceForm_Action", "", "Next")
#g.add("Change", 0, 0, 200, 17, "&Change [ProductName]")
g.add("Repair", 0, 18, 200, 17, "&Repair [ProductName]")
g.add("Remove", 0, 36, 200, 17, "Re&move [ProductName]")
maint.back("< Back", None, active=False)
c=maint.next("Finish", "Cancel")
# Change installation: Change progress dialog to "Change", then ask
# for feature selection
#c.event("[Progress1]", "Change", 'MaintenanceForm_Action="Change"', 1)
#c.event("[Progress2]", "changes", 'MaintenanceForm_Action="Change"', 2)
# Reinstall: Change progress dialog to "Repair", then invoke reinstall
# Also set list of reinstalled features to "ALL"
c.event("[REINSTALL]", "ALL", 'MaintenanceForm_Action="Repair"', 5)
c.event("[Progress1]", "Repairing", 'MaintenanceForm_Action="Repair"', 6)
c.event("[Progress2]", "repairs", 'MaintenanceForm_Action="Repair"', 7)
c.event("Reinstall", "ALL", 'MaintenanceForm_Action="Repair"', 8)
# Uninstall: Change progress to "Remove", then invoke uninstall
# Also set list of removed features to "ALL"
c.event("[REMOVE]", "ALL", 'MaintenanceForm_Action="Remove"', 11)
c.event("[Progress1]", "Removing", 'MaintenanceForm_Action="Remove"', 12)
c.event("[Progress2]", "removes", 'MaintenanceForm_Action="Remove"', 13)
c.event("Remove", "ALL", 'MaintenanceForm_Action="Remove"', 14)
# Close dialog when maintenance action scheduled
c.event("EndDialog", "Return", 'MaintenanceForm_Action<>"Change"', 20)
#c.event("NewDialog", "SelectFeaturesDlg", 'MaintenanceForm_Action="Change"', 21)
maint.cancel("Cancel", "RepairRadioGroup").event("SpawnDialog", "CancelDlg")
def get_installer_filename(self, fullname):
# Factored out to allow overriding in subclasses
if self.target_version:
base_name = "%s.%s-py%s.msi" % (fullname, self.plat_name,
self.target_version)
else:
base_name = "%s.%s.msi" % (fullname, self.plat_name)
installer_name = os.path.join(self.dist_dir, base_name)
return installer_name
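get_installer_filename() above encodes the distribution name, the platform and, when given, the targeted Python version into the .msi file name. A quick sketch of the resulting names, with made-up values:

# Naming scheme of bdist_msi.get_installer_filename() (values are invented).
fullname, plat_name, target_version = 'spam-1.0', 'win32', '3.3'
print("%s.%s-py%s.msi" % (fullname, plat_name, target_version))   # spam-1.0.win32-py3.3.msi
print("%s.%s.msi" % (fullname, plat_name))                        # spam-1.0.win32.msi (no target version)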


@@ -0,0 +1,342 @@
"""Create an executable installer for Windows."""
# FIXME synchronize bytes/str use with same file in distutils
import sys
import os
from shutil import rmtree
from sysconfig import get_python_version
from packaging.command.cmd import Command
from packaging.errors import PackagingOptionError, PackagingPlatformError
from packaging import logger
from packaging.util import get_platform
class bdist_wininst(Command):
description = "create an executable installer for Windows"
user_options = [('bdist-dir=', None,
"temporary directory for creating the distribution"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % get_platform()),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('target-version=', None,
"require a specific python version" +
" on the target system"),
('no-target-compile', 'c',
"do not compile .py to .pyc on the target system"),
('no-target-optimize', 'o',
"do not compile .py to .pyo (optimized)"
"on the target system"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
('bitmap=', 'b',
"bitmap to use for the installer instead of python-powered logo"),
('title=', 't',
"title to display on the installer background instead of default"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
('install-script=', None,
"basename of installation script to be run after"
"installation or before deinstallation"),
('pre-install-script=', None,
"Fully qualified filename of a script to be run before "
"any files are installed. This script need not be in the "
"distribution"),
('user-access-control=', None,
"specify Vista's UAC handling - 'none'/default=no "
"handling, 'auto'=use UAC if target Python installed for "
"all users, 'force'=always use UAC"),
]
boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
'skip-build']
def initialize_options(self):
self.bdist_dir = None
self.plat_name = None
self.keep_temp = False
self.no_target_compile = False
self.no_target_optimize = False
self.target_version = None
self.dist_dir = None
self.bitmap = None
self.title = None
self.skip_build = False
self.install_script = None
self.pre_install_script = None
self.user_access_control = None
def finalize_options(self):
if self.bdist_dir is None:
if self.skip_build and self.plat_name:
# If build is skipped and plat_name is overridden, bdist will
# not see the correct 'plat_name' - so set that up manually.
bdist = self.distribution.get_command_obj('bdist')
bdist.plat_name = self.plat_name
# next the command will be initialized using that name
bdist_base = self.get_finalized_command('bdist').bdist_base
self.bdist_dir = os.path.join(bdist_base, 'wininst')
if not self.target_version:
self.target_version = ""
if not self.skip_build and self.distribution.has_ext_modules():
short_version = get_python_version()
if self.target_version and self.target_version != short_version:
raise PackagingOptionError("target version can only be %s, or the '--skip-build'" \
" option must be specified" % (short_version,))
self.target_version = short_version
self.set_undefined_options('bdist', 'dist_dir', 'plat_name')
if self.install_script:
for script in self.distribution.scripts:
if self.install_script == os.path.basename(script):
break
else:
raise PackagingOptionError("install_script '%s' not found in scripts" % \
self.install_script)
def run(self):
if (sys.platform != "win32" and
(self.distribution.has_ext_modules() or
self.distribution.has_c_libraries())):
raise PackagingPlatformError \
("distribution contains extensions and/or C libraries; "
"must be compiled on a Windows 32 platform")
if not self.skip_build:
self.run_command('build')
        install = self.get_reinitialized_command('install_dist',
reinit_subcommands=True)
install.root = self.bdist_dir
install.skip_build = self.skip_build
install.warn_dir = False
install.plat_name = self.plat_name
install_lib = self.get_reinitialized_command('install_lib')
# we do not want to include pyc or pyo files
install_lib.compile = False
install_lib.optimize = 0
if self.distribution.has_ext_modules():
# If we are building an installer for a Python version other
# than the one we are currently running, then we need to ensure
# our build_lib reflects the other Python version rather than ours.
# Note that for target_version!=sys.version, we must have skipped the
# build step, so there is no issue with enforcing the build of this
# version.
target_version = self.target_version
if not target_version:
assert self.skip_build, "Should have already checked this"
target_version = sys.version[0:3]
plat_specifier = ".%s-%s" % (self.plat_name, target_version)
build = self.get_finalized_command('build')
build.build_lib = os.path.join(build.build_base,
'lib' + plat_specifier)
# Use a custom scheme for the zip-file, because we have to decide
# at installation time which scheme to use.
for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'):
value = key.upper()
if key == 'headers':
value = value + '/Include/$dist_name'
setattr(install,
'install_' + key,
value)
logger.info("installing to %s", self.bdist_dir)
install.ensure_finalized()
# avoid warning of 'install_lib' about installing
# into a directory not in sys.path
sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
install.run()
del sys.path[0]
# And make an archive relative to the root of the
# pseudo-installation tree.
from tempfile import NamedTemporaryFile
archive_basename = NamedTemporaryFile().name
fullname = self.distribution.get_fullname()
arcname = self.make_archive(archive_basename, "zip",
root_dir=self.bdist_dir)
# create an exe containing the zip-file
self.create_exe(arcname, fullname, self.bitmap)
if self.distribution.has_ext_modules():
pyversion = get_python_version()
else:
pyversion = 'any'
self.distribution.dist_files.append(('bdist_wininst', pyversion,
self.get_installer_filename(fullname)))
# remove the zip-file again
logger.debug("removing temporary file '%s'", arcname)
os.remove(arcname)
if not self.keep_temp:
if self.dry_run:
logger.info('removing %s', self.bdist_dir)
else:
rmtree(self.bdist_dir)
def get_inidata(self):
# Return data describing the installation.
lines = []
metadata = self.distribution.metadata
# Write the [metadata] section.
lines.append("[metadata]")
# 'info' will be displayed in the installer's dialog box,
# describing the items to be installed.
info = (metadata.long_description or '') + '\n'
# Escape newline characters
def escape(s):
return s.replace("\n", "\\n")
for name in ["author", "author_email", "description", "maintainer",
"maintainer_email", "name", "url", "version"]:
data = getattr(metadata, name, "")
if data:
info = info + ("\n %s: %s" % \
(name.capitalize(), escape(data)))
lines.append("%s=%s" % (name, escape(data)))
# The [setup] section contains entries controlling
# the installer runtime.
lines.append("\n[Setup]")
if self.install_script:
lines.append("install_script=%s" % self.install_script)
lines.append("info=%s" % escape(info))
lines.append("target_compile=%d" % (not self.no_target_compile))
lines.append("target_optimize=%d" % (not self.no_target_optimize))
if self.target_version:
lines.append("target_version=%s" % self.target_version)
if self.user_access_control:
lines.append("user_access_control=%s" % self.user_access_control)
title = self.title or self.distribution.get_fullname()
lines.append("title=%s" % escape(title))
import time
import packaging
build_info = "Built %s with packaging-%s" % \
(time.ctime(time.time()), packaging.__version__)
lines.append("build_info=%s" % build_info)
return "\n".join(lines)
def create_exe(self, arcname, fullname, bitmap=None):
import struct
self.mkpath(self.dist_dir)
cfgdata = self.get_inidata()
installer_name = self.get_installer_filename(fullname)
logger.info("creating %s", installer_name)
if bitmap:
with open(bitmap, "rb") as fp:
bitmapdata = fp.read()
bitmaplen = len(bitmapdata)
else:
bitmaplen = 0
with open(installer_name, "wb") as file:
file.write(self.get_exe_bytes())
if bitmap:
file.write(bitmapdata)
            # Convert cfgdata from str to bytes (mbcs-encoded, as expected by
            # the wininst stub); everything appended below must also be bytes
            if isinstance(cfgdata, str):
                cfgdata = cfgdata.encode("mbcs")
            # Append the pre-install script
            cfgdata = cfgdata + b"\0"
            if self.pre_install_script:
                # read the script as raw bytes so it can be concatenated
                with open(self.pre_install_script, "rb") as fp:
                    script_data = fp.read()
                cfgdata = cfgdata + script_data + b"\n\0"
            else:
                # empty pre-install script
                cfgdata = cfgdata + b"\0"
file.write(cfgdata)
# The 'magic number' 0x1234567B is used to make sure that the
# binary layout of 'cfgdata' is what the wininst.exe binary
# expects. If the layout changes, increment that number, make
# the corresponding changes to the wininst.exe sources, and
# recompile them.
header = struct.pack("<iii",
0x1234567B, # tag
len(cfgdata), # length
bitmaplen, # number of bytes in bitmap
)
file.write(header)
with open(arcname, "rb") as fp:
file.write(fp.read())
def get_installer_filename(self, fullname):
# Factored out to allow overriding in subclasses
if self.target_version:
# if we create an installer for a specific python version,
# it's better to include this in the name
installer_name = os.path.join(self.dist_dir,
"%s.%s-py%s.exe" %
(fullname, self.plat_name, self.target_version))
else:
installer_name = os.path.join(self.dist_dir,
"%s.%s.exe" % (fullname, self.plat_name))
return installer_name
def get_exe_bytes(self):
from packaging.compiler.msvccompiler import get_build_version
# If a target-version other than the current version has been
# specified, then using the MSVC version from *this* build is no good.
# Without actually finding and executing the target version and parsing
# its sys.version, we just hard-code our knowledge of old versions.
# NOTE: Possible alternative is to allow "--target-version" to
# specify a Python executable rather than a simple version string.
# We can then execute this program to obtain any info we need, such
# as the real sys.version string for the build.
cur_version = get_python_version()
if self.target_version and self.target_version != cur_version:
# If the target version is *later* than us, then we assume they
# use what we use
# string compares seem wrong, but are what sysconfig.py itself uses
if self.target_version > cur_version:
bv = get_build_version()
else:
if self.target_version < "2.4":
bv = 6.0
else:
bv = 7.1
else:
# for current version - use authoritative check.
bv = get_build_version()
# wininst-x.y.exe is in the same directory as this file
directory = os.path.dirname(__file__)
# we must use a wininst-x.y.exe built with the same C compiler
# used for python. XXX What about mingw, borland, and so on?
# if plat_name starts with "win" but is not "win32"
# we want to strip "win" and leave the rest (e.g. -amd64)
# for all other cases, we don't want any suffix
if self.plat_name != 'win32' and self.plat_name[:3] == 'win':
sfix = self.plat_name[3:]
else:
sfix = ''
filename = os.path.join(directory, "wininst-%.1f%s.exe" % (bv, sfix))
with open(filename, "rb") as fp:
return fp.read()
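create_exe() above writes the wininst stub, an optional bitmap, the mbcs-encoded config data, a 12-byte header carrying the 0x1234567B layout tag plus the config and bitmap lengths, and finally the zip archive. A minimal sketch of that trailer header, with an invented payload:

# Sketch of the trailer header written by create_exe() (payload is invented).
import struct

cfgdata = b"[metadata]\nname=spam\0\0"
bitmaplen = 0
header = struct.pack("<iii",
                     0x1234567B,     # layout tag expected by wininst.exe
                     len(cfgdata),   # length of the config data
                     bitmaplen)      # number of bytes in the bitmap
print(len(header))                   # 12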


@@ -0,0 +1,151 @@
"""Main build command, which calls the other build_* commands."""
import sys
import os
from packaging.util import get_platform
from packaging.command.cmd import Command
from packaging.errors import PackagingOptionError
from packaging.compiler import show_compilers
class build(Command):
description = "build everything needed to install"
user_options = [
('build-base=', 'b',
"base directory for build library"),
('build-purelib=', None,
"build directory for platform-neutral distributions"),
('build-platlib=', None,
"build directory for platform-specific distributions"),
('build-lib=', None,
"build directory for all distribution (defaults to either " +
"build-purelib or build-platlib"),
('build-scripts=', None,
"build directory for scripts"),
('build-temp=', 't',
"temporary build directory"),
('plat-name=', 'p',
"platform name to build for, if supported "
"(default: %s)" % get_platform()),
('compiler=', 'c',
"specify the compiler type"),
('debug', 'g',
"compile extensions and libraries with debugging information"),
('force', 'f',
"forcibly build everything (ignore file timestamps)"),
('executable=', 'e',
"specify final destination interpreter path (build.py)"),
('use-2to3', None,
"use 2to3 to make source python 3.x compatible"),
('convert-2to3-doctests', None,
"use 2to3 to convert doctests in seperate text files"),
('use-2to3-fixers', None,
"list additional fixers opted for during 2to3 conversion"),
]
boolean_options = ['debug', 'force']
help_options = [
('help-compiler', None,
"list available compilers", show_compilers),
]
def initialize_options(self):
self.build_base = 'build'
# these are decided only after 'build_base' has its final value
# (unless overridden by the user or client)
self.build_purelib = None
self.build_platlib = None
self.build_lib = None
self.build_temp = None
self.build_scripts = None
self.compiler = None
self.plat_name = None
self.debug = None
self.force = False
self.executable = None
self.use_2to3 = False
self.convert_2to3_doctests = None
self.use_2to3_fixers = None
def finalize_options(self):
if self.plat_name is None:
self.plat_name = get_platform()
else:
# plat-name only supported for windows (other platforms are
# supported via ./configure flags, if at all). Avoid misleading
# other platforms.
if os.name != 'nt':
raise PackagingOptionError(
"--plat-name only supported on Windows (try "
"using './configure --help' on your platform)")
plat_specifier = ".%s-%s" % (self.plat_name, sys.version[0:3])
        # Make it so a normal Python and the same Python built --with-pydebug
        # don't share the same build directories. Doing so confuses the build
# process for C modules
if hasattr(sys, 'gettotalrefcount'):
plat_specifier += '-pydebug'
# 'build_purelib' and 'build_platlib' just default to 'lib' and
# 'lib.<plat>' under the base build directory. We only use one of
# them for a given distribution, though --
if self.build_purelib is None:
self.build_purelib = os.path.join(self.build_base, 'lib')
if self.build_platlib is None:
self.build_platlib = os.path.join(self.build_base,
'lib' + plat_specifier)
# 'build_lib' is the actual directory that we will use for this
# particular module distribution -- if user didn't supply it, pick
# one of 'build_purelib' or 'build_platlib'.
if self.build_lib is None:
if self.distribution.ext_modules:
self.build_lib = self.build_platlib
else:
self.build_lib = self.build_purelib
# 'build_temp' -- temporary directory for compiler turds,
# "build/temp.<plat>"
if self.build_temp is None:
self.build_temp = os.path.join(self.build_base,
'temp' + plat_specifier)
if self.build_scripts is None:
self.build_scripts = os.path.join(self.build_base,
'scripts-' + sys.version[0:3])
if self.executable is None:
self.executable = os.path.normpath(sys.executable)
def run(self):
# Run all relevant sub-commands. This will be some subset of:
# - build_py - pure Python modules
# - build_clib - standalone C libraries
# - build_ext - Python extension modules
# - build_scripts - Python scripts
for cmd_name in self.get_sub_commands():
self.run_command(cmd_name)
# -- Predicates for the sub-command list ---------------------------
def has_pure_modules(self):
return self.distribution.has_pure_modules()
def has_c_libraries(self):
return self.distribution.has_c_libraries()
def has_ext_modules(self):
return self.distribution.has_ext_modules()
def has_scripts(self):
return self.distribution.has_scripts()
sub_commands = [('build_py', has_pure_modules),
('build_clib', has_c_libraries),
('build_ext', has_ext_modules),
('build_scripts', has_scripts),
]
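finalize_options() above derives the per-platform build directories from build_base and a platform specifier. A short sketch of the resulting layout, using invented platform and version values:

# Build tree layout chosen by build.finalize_options() (values are invented).
import os

build_base, plat_name, pyver = 'build', 'linux-x86_64', '3.3'
plat_specifier = ".%s-%s" % (plat_name, pyver)
print(os.path.join(build_base, 'lib'))                    # build_purelib
print(os.path.join(build_base, 'lib' + plat_specifier))   # build_platlib
print(os.path.join(build_base, 'temp' + plat_specifier))  # build_temp
print(os.path.join(build_base, 'scripts-' + pyver))       # build_scripts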


@@ -0,0 +1,198 @@
"""Build C/C++ libraries.
This command is useful to build libraries that are included in the
distribution and needed by extension modules.
"""
# XXX this module has *lots* of code ripped-off quite transparently from
# build_ext.py -- not surprisingly really, as the work required to build
# a static library from a collection of C source files is not really all
# that different from what's required to build a shared object file from
# a collection of C source files. Nevertheless, I haven't done the
# necessary refactoring to account for the overlap in code between the
# two modules, mainly because a number of subtle details changed in the
# cut 'n paste. Sigh.
import os
from packaging.command.cmd import Command
from packaging.errors import PackagingSetupError
from packaging.compiler import customize_compiler
from packaging import logger
def show_compilers():
from packaging.compiler import show_compilers
show_compilers()
class build_clib(Command):
description = "build C/C++ libraries used by extension modules"
user_options = [
('build-clib=', 'b',
"directory to build C/C++ libraries to"),
('build-temp=', 't',
"directory to put temporary build by-products"),
('debug', 'g',
"compile with debugging information"),
('force', 'f',
"forcibly build everything (ignore file timestamps)"),
('compiler=', 'c',
"specify the compiler type"),
]
boolean_options = ['debug', 'force']
help_options = [
('help-compiler', None,
"list available compilers", show_compilers),
]
def initialize_options(self):
self.build_clib = None
self.build_temp = None
# List of libraries to build
self.libraries = None
# Compilation options for all libraries
self.include_dirs = None
self.define = None
self.undef = None
self.debug = None
self.force = False
self.compiler = None
def finalize_options(self):
# This might be confusing: both build-clib and build-temp default
# to build-temp as defined by the "build" command. This is because
# I think that C libraries are really just temporary build
# by-products, at least from the point of view of building Python
# extensions -- but I want to keep my options open.
self.set_undefined_options('build',
('build_temp', 'build_clib'),
('build_temp', 'build_temp'),
'compiler', 'debug', 'force')
self.libraries = self.distribution.libraries
if self.libraries:
self.check_library_list(self.libraries)
if self.include_dirs is None:
self.include_dirs = self.distribution.include_dirs or []
if isinstance(self.include_dirs, str):
self.include_dirs = self.include_dirs.split(os.pathsep)
# XXX same as for build_ext -- what about 'self.define' and
# 'self.undef' ?
def run(self):
if not self.libraries:
return
# Yech -- this is cut 'n pasted from build_ext.py!
from packaging.compiler import new_compiler
self.compiler = new_compiler(compiler=self.compiler,
dry_run=self.dry_run,
force=self.force)
customize_compiler(self.compiler)
if self.include_dirs is not None:
self.compiler.set_include_dirs(self.include_dirs)
if self.define is not None:
# 'define' option is a list of (name,value) tuples
for name, value in self.define:
self.compiler.define_macro(name, value)
if self.undef is not None:
for macro in self.undef:
self.compiler.undefine_macro(macro)
self.build_libraries(self.libraries)
def check_library_list(self, libraries):
"""Ensure that the list of libraries is valid.
        `libraries` is presumably provided as a command option 'libraries'.
This method checks that it is a list of 2-tuples, where the tuples
are (library_name, build_info_dict).
Raise PackagingSetupError if the structure is invalid anywhere;
just returns otherwise.
"""
if not isinstance(libraries, list):
raise PackagingSetupError("'libraries' option must be a list of tuples")
for lib in libraries:
            if not isinstance(lib, tuple) or len(lib) != 2:
                raise PackagingSetupError(
                    "each element of 'libraries' must be a 2-tuple")
name, build_info = lib
            if not isinstance(name, str):
                raise PackagingSetupError(
                    "first element of each tuple in 'libraries' "
                    "must be a string (the library name)")
            if '/' in name or (os.sep != '/' and os.sep in name):
                raise PackagingSetupError(
                    "bad library name '%s': may not contain directory "
                    "separators" % name)
            if not isinstance(build_info, dict):
                raise PackagingSetupError(
                    "second element of each tuple in 'libraries' "
                    "must be a dictionary (build info)")
def get_library_names(self):
# Assume the library list is valid -- 'check_library_list()' is
# called from 'finalize_options()', so it should be!
if not self.libraries:
return None
lib_names = []
for lib_name, build_info in self.libraries:
lib_names.append(lib_name)
return lib_names
def get_source_files(self):
self.check_library_list(self.libraries)
filenames = []
for lib_name, build_info in self.libraries:
sources = build_info.get('sources')
if sources is None or not isinstance(sources, (list, tuple)):
raise PackagingSetupError(("in 'libraries' option (library '%s'), "
"'sources' must be present and must be "
"a list of source filenames") % lib_name)
filenames.extend(sources)
return filenames
def build_libraries(self, libraries):
for lib_name, build_info in libraries:
sources = build_info.get('sources')
if sources is None or not isinstance(sources, (list, tuple)):
raise PackagingSetupError(("in 'libraries' option (library '%s'), " +
"'sources' must be present and must be " +
"a list of source filenames") % lib_name)
sources = list(sources)
logger.info("building '%s' library", lib_name)
# First, compile the source code to object files in the library
# directory. (This should probably change to putting object
# files in a temporary build directory.)
macros = build_info.get('macros')
include_dirs = build_info.get('include_dirs')
objects = self.compiler.compile(sources,
output_dir=self.build_temp,
macros=macros,
include_dirs=include_dirs,
debug=self.debug)
# Now "link" the object files together into a static library.
# (On Unix at least, this isn't really linking -- it just
# builds an archive. Whatever.)
self.compiler.create_static_lib(objects, lib_name,
output_dir=self.build_clib,
debug=self.debug)

View file

@ -0,0 +1,666 @@
"""Build extension modules."""
# FIXME Is this module limited to C extensions or do C++ extensions work too?
# The docstring of this module said that C++ was not supported, but other
# comments contradict that.
import os
import re
import sys
import logging
import sysconfig
from packaging.util import get_platform
from packaging.command.cmd import Command
from packaging.errors import (CCompilerError, CompileError, PackagingError,
PackagingPlatformError, PackagingSetupError)
from packaging.compiler import customize_compiler, show_compilers
from packaging.util import newer_group
from packaging.compiler.extension import Extension
from packaging import logger
import site
HAS_USER_SITE = True
if os.name == 'nt':
from packaging.compiler.msvccompiler import get_build_version
MSVC_VERSION = int(get_build_version())
# An extension name is just a dot-separated list of Python NAMEs (ie.
# the same as a fully-qualified module name).
extension_name_re = re.compile(
    r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
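# e.g. 'spam' and 'pkg.spam' match; 'pkg-spam' and '1spam' do not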
class build_ext(Command):
description = "build C/C++ extension modules (compile/link to build directory)"
# XXX thoughts on how to deal with complex command-line options like
# these, i.e. how to make it so fancy_getopt can suck them off the
# command line and make it look like setup.py defined the appropriate
# lists of tuples of what-have-you.
# - each command needs a callback to process its command-line options
# - Command.__init__() needs access to its share of the whole
# command line (must ultimately come from
# Distribution.parse_command_line())
# - it then calls the current command class' option-parsing
# callback to deal with weird options like -D, which have to
# parse the option text and churn out some custom data
# structure
# - that data structure (in this case, a list of 2-tuples)
# will then be present in the command object by the time
# we get to finalize_options() (i.e. the constructor
# takes care of both command-line and client options
# in between initialize_options() and finalize_options())
sep_by = " (separated by '%s')" % os.pathsep
user_options = [
('build-lib=', 'b',
"directory for compiled extension modules"),
('build-temp=', 't',
"directory for temporary files (build by-products)"),
('plat-name=', 'p',
"platform name to cross-compile for, if supported "
"(default: %s)" % get_platform()),
('inplace', 'i',
"ignore build-lib and put compiled extensions into the source " +
"directory alongside your pure Python modules"),
('include-dirs=', 'I',
"list of directories to search for header files" + sep_by),
('define=', 'D',
"C preprocessor macros to define"),
('undef=', 'U',
"C preprocessor macros to undefine"),
('libraries=', 'l',
"external C libraries to link with"),
('library-dirs=', 'L',
"directories to search for external C libraries" + sep_by),
('rpath=', 'R',
"directories to search for shared C libraries at runtime"),
('link-objects=', 'O',
"extra explicit link objects to include in the link"),
('debug', 'g',
"compile/link with debugging information"),
('force', 'f',
"forcibly build everything (ignore file timestamps)"),
('compiler=', 'c',
"specify the compiler type"),
('swig-opts=', None,
"list of SWIG command-line options"),
('swig=', None,
"path to the SWIG executable"),
]
boolean_options = ['inplace', 'debug', 'force']
if HAS_USER_SITE:
user_options.append(('user', None,
"add user include, library and rpath"))
boolean_options.append('user')
help_options = [
('help-compiler', None,
"list available compilers", show_compilers),
]
def initialize_options(self):
self.extensions = None
self.build_lib = None
self.plat_name = None
self.build_temp = None
self.inplace = False
self.package = None
self.include_dirs = None
self.define = None
self.undef = None
self.libraries = None
self.library_dirs = None
self.rpath = None
self.link_objects = None
self.debug = None
self.force = None
self.compiler = None
self.swig = None
self.swig_opts = None
if HAS_USER_SITE:
self.user = None
def finalize_options(self):
self.set_undefined_options('build',
'build_lib', 'build_temp', 'compiler',
'debug', 'force', 'plat_name')
if self.package is None:
self.package = self.distribution.ext_package
# Ensure that the list of extensions is valid, i.e. it is a list of
# Extension objects.
self.extensions = self.distribution.ext_modules
if self.extensions:
if not isinstance(self.extensions, (list, tuple)):
type_name = (self.extensions is None and 'None'
or type(self.extensions).__name__)
raise PackagingSetupError(
"'ext_modules' must be a sequence of Extension instances,"
" not %s" % (type_name,))
for i, ext in enumerate(self.extensions):
if isinstance(ext, Extension):
continue # OK! (assume type-checking done
# by Extension constructor)
type_name = (ext is None and 'None' or type(ext).__name__)
raise PackagingSetupError(
"'ext_modules' item %d must be an Extension instance,"
" not %s" % (i, type_name))
# Make sure Python's include directories (for Python.h, pyconfig.h,
# etc.) are in the include search path.
py_include = sysconfig.get_path('include')
plat_py_include = sysconfig.get_path('platinclude')
if self.include_dirs is None:
self.include_dirs = self.distribution.include_dirs or []
if isinstance(self.include_dirs, str):
self.include_dirs = self.include_dirs.split(os.pathsep)
# Put the Python "system" include dir at the end, so that
# any local include dirs take precedence.
self.include_dirs.append(py_include)
if plat_py_include != py_include:
self.include_dirs.append(plat_py_include)
if isinstance(self.libraries, str):
self.libraries = [self.libraries]
# Life is easier if we're not forever checking for None, so
# simplify these options to empty lists if unset
if self.libraries is None:
self.libraries = []
if self.library_dirs is None:
self.library_dirs = []
elif isinstance(self.library_dirs, str):
self.library_dirs = self.library_dirs.split(os.pathsep)
if self.rpath is None:
self.rpath = []
elif isinstance(self.rpath, str):
self.rpath = self.rpath.split(os.pathsep)
# for extensions under windows use different directories
# for Release and Debug builds.
# also Python's library directory must be appended to library_dirs
if os.name == 'nt':
# the 'libs' directory is for binary installs - we assume that
# must be the *native* platform. But we don't really support
# cross-compiling via a binary install anyway, so we let it go.
self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
if self.debug:
self.build_temp = os.path.join(self.build_temp, "Debug")
else:
self.build_temp = os.path.join(self.build_temp, "Release")
# Append the source distribution include and library directories,
# this allows distutils on windows to work in the source tree
self.include_dirs.append(os.path.join(sys.exec_prefix, 'PC'))
if MSVC_VERSION == 9:
# Use the .lib files for the correct architecture
if self.plat_name == 'win32':
suffix = ''
else:
# win-amd64 or win-ia64
suffix = self.plat_name[4:]
new_lib = os.path.join(sys.exec_prefix, 'PCbuild')
if suffix:
new_lib = os.path.join(new_lib, suffix)
self.library_dirs.append(new_lib)
elif MSVC_VERSION == 8:
self.library_dirs.append(os.path.join(sys.exec_prefix,
'PC', 'VS8.0'))
elif MSVC_VERSION == 7:
self.library_dirs.append(os.path.join(sys.exec_prefix,
'PC', 'VS7.1'))
else:
self.library_dirs.append(os.path.join(sys.exec_prefix,
'PC', 'VC6'))
# OS/2 (EMX) doesn't support Debug vs Release builds, but has the
# import libraries in its "Config" subdirectory
if os.name == 'os2':
self.library_dirs.append(os.path.join(sys.exec_prefix, 'Config'))
# for extensions under Cygwin and AtheOS Python's library directory must be
# appended to library_dirs
if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos':
if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
# building third party extensions
self.library_dirs.append(os.path.join(sys.prefix, "lib",
"python" + sysconfig.get_python_version(),
"config"))
else:
# building python standard extensions
self.library_dirs.append(os.curdir)
# for extensions under Linux or Solaris with a shared Python library,
# Python's library directory must be appended to library_dirs
if ((sys.platform.startswith('linux') or sys.platform.startswith('gnu')
or sys.platform.startswith('sunos'))
and sysconfig.get_config_var('Py_ENABLE_SHARED')):
if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
# building third party extensions
self.library_dirs.append(sysconfig.get_config_var('LIBDIR'))
else:
# building python standard extensions
self.library_dirs.append(os.curdir)
# The argument parsing will result in self.define being a string, but
# it has to be a list of 2-tuples. All the preprocessor symbols
# specified by the 'define' option will be set to '1'. Multiple
# symbols can be separated with commas.
if self.define:
defines = self.define.split(',')
self.define = [(symbol, '1') for symbol in defines]
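            # e.g. (illustrative) "--define FOO,BAR" on the command line
            # results in self.define == [('FOO', '1'), ('BAR', '1')]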
# The option for macros to undefine is also a string from the
# option parsing, but has to be a list. Multiple symbols can also
# be separated with commas here.
if self.undef:
self.undef = self.undef.split(',')
if self.swig_opts is None:
self.swig_opts = []
else:
self.swig_opts = self.swig_opts.split(' ')
# Finally add the user include and library directories if requested
if HAS_USER_SITE and self.user:
user_include = os.path.join(site.USER_BASE, "include")
user_lib = os.path.join(site.USER_BASE, "lib")
if os.path.isdir(user_include):
self.include_dirs.append(user_include)
if os.path.isdir(user_lib):
self.library_dirs.append(user_lib)
self.rpath.append(user_lib)
def run(self):
from packaging.compiler import new_compiler
# 'self.extensions', as supplied by setup.py, is a list of
# Extension instances. See the documentation for Extension (in
# distutils.extension) for details.
if not self.extensions:
return
# If we were asked to build any C/C++ libraries, make sure that the
# directory where we put them is in the library search path for
# linking extensions.
if self.distribution.has_c_libraries():
build_clib = self.get_finalized_command('build_clib')
self.libraries.extend(build_clib.get_library_names() or [])
self.library_dirs.append(build_clib.build_clib)
# Temporary kludge until we remove the verbose arguments and use
# logging everywhere
        verbose = logger.getEffectiveLevel() <= logging.DEBUG
# Setup the CCompiler object that we'll use to do all the
# compiling and linking
self.compiler_obj = new_compiler(compiler=self.compiler,
verbose=verbose,
dry_run=self.dry_run,
force=self.force)
customize_compiler(self.compiler_obj)
# If we are cross-compiling, init the compiler now (if we are not
# cross-compiling, init would not hurt, but people may rely on
# late initialization of compiler even if they shouldn't...)
if os.name == 'nt' and self.plat_name != get_platform():
self.compiler_obj.initialize(self.plat_name)
# And make sure that any compile/link-related options (which might
# come from the command line or from the setup script) are set in
# that CCompiler object -- that way, they automatically apply to
# all compiling and linking done here.
if self.include_dirs is not None:
self.compiler_obj.set_include_dirs(self.include_dirs)
if self.define is not None:
# 'define' option is a list of (name,value) tuples
for name, value in self.define:
self.compiler_obj.define_macro(name, value)
if self.undef is not None:
for macro in self.undef:
self.compiler_obj.undefine_macro(macro)
if self.libraries is not None:
self.compiler_obj.set_libraries(self.libraries)
if self.library_dirs is not None:
self.compiler_obj.set_library_dirs(self.library_dirs)
if self.rpath is not None:
self.compiler_obj.set_runtime_library_dirs(self.rpath)
if self.link_objects is not None:
self.compiler_obj.set_link_objects(self.link_objects)
# Now actually compile and link everything.
self.build_extensions()
def get_source_files(self):
filenames = []
# Wouldn't it be neat if we knew the names of header files too...
for ext in self.extensions:
filenames.extend(ext.sources)
return filenames
def get_outputs(self):
# And build the list of output (built) filenames. Note that this
# ignores the 'inplace' flag, and assumes everything goes in the
# "build" tree.
outputs = []
for ext in self.extensions:
outputs.append(self.get_ext_fullpath(ext.name))
return outputs
def build_extensions(self):
for ext in self.extensions:
try:
self.build_extension(ext)
except (CCompilerError, PackagingError, CompileError) as e:
if not ext.optional:
raise
logger.warning('%s: building extension %r failed: %s',
self.get_command_name(), ext.name, e)
def build_extension(self, ext):
sources = ext.sources
if sources is None or not isinstance(sources, (list, tuple)):
raise PackagingSetupError(("in 'ext_modules' option (extension '%s'), " +
"'sources' must be present and must be " +
"a list of source filenames") % ext.name)
sources = list(sources)
ext_path = self.get_ext_fullpath(ext.name)
depends = sources + ext.depends
if not (self.force or newer_group(depends, ext_path, 'newer')):
logger.debug("skipping '%s' extension (up-to-date)", ext.name)
return
else:
logger.info("building '%s' extension", ext.name)
# First, scan the sources for SWIG definition files (.i), run
# SWIG on 'em to create .c files, and modify the sources list
# accordingly.
sources = self.swig_sources(sources, ext)
# Next, compile the source code to object files.
# XXX not honouring 'define_macros' or 'undef_macros' -- the
# CCompiler API needs to change to accommodate this, and I
# want to do one thing at a time!
# Two possible sources for extra compiler arguments:
# - 'extra_compile_args' in Extension object
# - CFLAGS environment variable (not particularly
# elegant, but people seem to expect it and I
# guess it's useful)
# The environment variable should take precedence, and
# any sensible compiler will give precedence to later
# command-line args. Hence we combine them in order:
extra_args = ext.extra_compile_args or []
macros = ext.define_macros[:]
for undef in ext.undef_macros:
macros.append((undef,))
objects = self.compiler_obj.compile(sources,
output_dir=self.build_temp,
macros=macros,
include_dirs=ext.include_dirs,
debug=self.debug,
extra_postargs=extra_args,
depends=ext.depends)
# XXX -- this is a Vile HACK!
#
# The setup.py script for Python on Unix needs to be able to
# get this list so it can perform all the clean up needed to
# avoid keeping object files around when cleaning out a failed
# build of an extension module. Since Packaging does not
# track dependencies, we have to get rid of intermediates to
# ensure all the intermediates will be properly re-built.
#
self._built_objects = objects[:]
# Now link the object files together into a "shared object" --
# of course, first we have to figure out all the other things
# that go into the mix.
if ext.extra_objects:
objects.extend(ext.extra_objects)
extra_args = ext.extra_link_args or []
# Detect target language, if not provided
language = ext.language or self.compiler_obj.detect_language(sources)
self.compiler_obj.link_shared_object(
objects, ext_path,
libraries=self.get_libraries(ext),
library_dirs=ext.library_dirs,
runtime_library_dirs=ext.runtime_library_dirs,
extra_postargs=extra_args,
export_symbols=self.get_export_symbols(ext),
debug=self.debug,
build_temp=self.build_temp,
target_lang=language)
def swig_sources(self, sources, extension):
"""Walk the list of source files in 'sources', looking for SWIG
interface (.i) files. Run SWIG on all that are found, and
return a modified 'sources' list with SWIG source files replaced
by the generated C (or C++) files.
"""
new_sources = []
swig_sources = []
swig_targets = {}
# XXX this drops generated C/C++ files into the source tree, which
# is fine for developers who want to distribute the generated
# source -- but there should be an option to put SWIG output in
# the temp dir.
if ('-c++' in self.swig_opts or '-c++' in extension.swig_opts):
target_ext = '.cpp'
else:
target_ext = '.c'
for source in sources:
base, ext = os.path.splitext(source)
if ext == ".i": # SWIG interface file
new_sources.append(base + '_wrap' + target_ext)
swig_sources.append(source)
swig_targets[source] = new_sources[-1]
else:
new_sources.append(source)
if not swig_sources:
return new_sources
swig = self.swig or self.find_swig()
swig_cmd = [swig, "-python"]
swig_cmd.extend(self.swig_opts)
# Do not override commandline arguments
if not self.swig_opts:
for o in extension.swig_opts:
swig_cmd.append(o)
for source in swig_sources:
target = swig_targets[source]
logger.info("swigging %s to %s", source, target)
self.spawn(swig_cmd + ["-o", target, source])
return new_sources
def find_swig(self):
"""Return the name of the SWIG executable. On Unix, this is
just "swig" -- it should be in the PATH. Tries a bit harder on
Windows.
"""
if os.name == "posix":
return "swig"
elif os.name == "nt":
# Look for SWIG in its standard installation directory on
# Windows (or so I presume!). If we find it there, great;
# if not, act like Unix and assume it's in the PATH.
for vers in ("1.3", "1.2", "1.1"):
fn = os.path.join("c:\\swig%s" % vers, "swig.exe")
if os.path.isfile(fn):
return fn
else:
return "swig.exe"
elif os.name == "os2":
# assume swig available in the PATH.
return "swig.exe"
else:
raise PackagingPlatformError(("I don't know how to find (much less run) SWIG "
"on platform '%s'") % os.name)
# -- Name generators -----------------------------------------------
# (extension names, filenames, whatever)
def get_ext_fullpath(self, ext_name):
"""Returns the path of the filename for a given extension.
The file is located in `build_lib` or directly in the package
(inplace option).
"""
fullname = self.get_ext_fullname(ext_name)
modpath = fullname.split('.')
filename = self.get_ext_filename(modpath[-1])
if not self.inplace:
# no further work needed
# returning :
# build_dir/package/path/filename
filename = os.path.join(*modpath[:-1]+[filename])
return os.path.join(self.build_lib, filename)
# the inplace option requires to find the package directory
# using the build_py command for that
package = '.'.join(modpath[0:-1])
build_py = self.get_finalized_command('build_py')
package_dir = os.path.abspath(build_py.get_package_dir(package))
# returning
# package_dir/filename
return os.path.join(package_dir, filename)
def get_ext_fullname(self, ext_name):
"""Returns the fullname of a given extension name.
Adds the `package.` prefix"""
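        # e.g. with self.package == 'pkg', 'spam' becomes 'pkg.spam'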
if self.package is None:
return ext_name
else:
return self.package + '.' + ext_name
def get_ext_filename(self, ext_name):
r"""Convert the name of an extension (eg. "foo.bar") into the name
of the file from which it will be loaded (eg. "foo/bar.so", or
"foo\bar.pyd").
"""
ext_path = ext_name.split('.')
# OS/2 has an 8 character module (extension) limit :-(
if os.name == "os2":
            ext_path[-1] = ext_path[-1][:8]
# extensions in debug_mode are named 'module_d.pyd' under windows
so_ext = sysconfig.get_config_var('SO')
if os.name == 'nt' and self.debug:
return os.path.join(*ext_path) + '_d' + so_ext
return os.path.join(*ext_path) + so_ext
def get_export_symbols(self, ext):
"""Return the list of symbols that a shared extension has to
export. This either uses 'ext.export_symbols' or, if it's not
provided, "init" + module_name. Only relevant on Windows, where
the .pyd file (DLL) must export the module "init" function.
"""
initfunc_name = "init" + ext.name.split('.')[-1]
if initfunc_name not in ext.export_symbols:
ext.export_symbols.append(initfunc_name)
return ext.export_symbols
def get_libraries(self, ext):
"""Return the list of libraries to link against when building a
shared extension. On most platforms, this is just 'ext.libraries';
on Windows and OS/2, we add the Python library (eg. python20.dll).
"""
# The python library is always needed on Windows. For MSVC, this
# is redundant, since the library is mentioned in a pragma in
# pyconfig.h that MSVC groks. The other Windows compilers all seem
# to need it mentioned explicitly, though, so that's what we do.
# Append '_d' to the python import library on debug builds.
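        # For example (illustrative), on Python 3.2 the name computed
        # below is 'python32' on Windows and OS/2 ('python32_d' for a
        # Windows debug build) and 'python3.2' on Cygwin, AtheOS and
        # shared-library Unix builds.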
if sys.platform == "win32":
from packaging.compiler.msvccompiler import MSVCCompiler
if not isinstance(self.compiler_obj, MSVCCompiler):
template = "python%d%d"
if self.debug:
template = template + '_d'
pythonlib = (template %
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
# don't extend ext.libraries, it may be shared with other
# extensions, it is a reference to the original list
return ext.libraries + [pythonlib]
else:
return ext.libraries
elif sys.platform == "os2emx":
# EMX/GCC requires the python library explicitly, and I
# believe VACPP does as well (though not confirmed) - AIM Apr01
template = "python%d%d"
# debug versions of the main DLL aren't supported, at least
# not at this time - AIM Apr01
#if self.debug:
# template = template + '_d'
pythonlib = (template %
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
# don't extend ext.libraries, it may be shared with other
# extensions, it is a reference to the original list
return ext.libraries + [pythonlib]
elif sys.platform[:6] == "cygwin":
template = "python%d.%d"
pythonlib = (template %
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
# don't extend ext.libraries, it may be shared with other
# extensions, it is a reference to the original list
return ext.libraries + [pythonlib]
elif sys.platform[:6] == "atheos":
template = "python%d.%d"
pythonlib = (template %
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
# Get SHLIBS from Makefile
extra = []
for lib in sysconfig.get_config_var('SHLIBS').split():
if lib.startswith('-l'):
extra.append(lib[2:])
else:
extra.append(lib)
# don't extend ext.libraries, it may be shared with other
# extensions, it is a reference to the original list
return ext.libraries + [pythonlib, "m"] + extra
elif sys.platform == 'darwin':
# Don't use the default code below
return ext.libraries
else:
if sysconfig.get_config_var('Py_ENABLE_SHARED'):
template = "python%d.%d"
pythonlib = (template %
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
return ext.libraries + [pythonlib]
else:
return ext.libraries

View file

@ -0,0 +1,410 @@
"""Build pure Python modules (just copy to build directory)."""
import os
import sys
from glob import glob
from packaging import logger
from packaging.command.cmd import Command
from packaging.errors import PackagingOptionError, PackagingFileError
from packaging.util import convert_path
from packaging.compat import Mixin2to3
# marking public APIs
__all__ = ['build_py']
class build_py(Command, Mixin2to3):
description = "build pure Python modules (copy to build directory)"
user_options = [
('build-lib=', 'd', "directory to build (copy) to"),
('compile', 'c', "compile .py to .pyc"),
('no-compile', None, "don't compile .py files [default]"),
('optimize=', 'O',
"also compile with optimization: -O1 for \"python -O\", "
"-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
('force', 'f', "forcibly build everything (ignore file timestamps)"),
('use-2to3', None,
"use 2to3 to make source python 3.x compatible"),
('convert-2to3-doctests', None,
"use 2to3 to convert doctests in seperate text files"),
('use-2to3-fixers', None,
"list additional fixers opted for during 2to3 conversion"),
]
boolean_options = ['compile', 'force']
negative_opt = {'no-compile' : 'compile'}
def initialize_options(self):
self.build_lib = None
self.py_modules = None
self.package = None
self.package_data = None
self.package_dir = None
self.compile = False
self.optimize = 0
self.force = None
self._updated_files = []
self._doctests_2to3 = []
self.use_2to3 = False
self.convert_2to3_doctests = None
self.use_2to3_fixers = None
def finalize_options(self):
self.set_undefined_options('build',
'use_2to3', 'use_2to3_fixers',
'convert_2to3_doctests', 'build_lib',
'force')
# Get the distribution options that are aliases for build_py
# options -- list of packages and list of modules.
self.packages = self.distribution.packages
self.py_modules = self.distribution.py_modules
self.package_data = self.distribution.package_data
self.package_dir = None
if self.distribution.package_dir is not None:
self.package_dir = convert_path(self.distribution.package_dir)
self.data_files = self.get_data_files()
# Ick, copied straight from install_lib.py (fancy_getopt needs a
# type system! Hell, *everything* needs a type system!!!)
if not isinstance(self.optimize, int):
try:
self.optimize = int(self.optimize)
assert 0 <= self.optimize <= 2
except (ValueError, AssertionError):
raise PackagingOptionError("optimize must be 0, 1, or 2")
def run(self):
# XXX copy_file by default preserves atime and mtime. IMHO this is
# the right thing to do, but perhaps it should be an option -- in
# particular, a site administrator might want installed files to
# reflect the time of installation rather than the last
# modification time before the installed release.
# XXX copy_file by default preserves mode, which appears to be the
# wrong thing to do: if a file is read-only in the working
# directory, we want it to be installed read/write so that the next
# installation of the same module distribution can overwrite it
# without problems. (This might be a Unix-specific issue.) Thus
# we turn off 'preserve_mode' when copying to the build directory,
# since the build directory is supposed to be exactly what the
# installation will look like (ie. we preserve mode when
# installing).
# Two options control which modules will be installed: 'packages'
# and 'py_modules'. The former lets us work with whole packages, not
# specifying individual modules at all; the latter is for
# specifying modules one-at-a-time.
if self.py_modules:
self.build_modules()
if self.packages:
self.build_packages()
self.build_package_data()
if self.use_2to3 and self._updated_files:
self.run_2to3(self._updated_files, self._doctests_2to3,
self.use_2to3_fixers)
self.byte_compile(self.get_outputs(include_bytecode=False))
# -- Top-level worker functions ------------------------------------
def get_data_files(self):
"""Generate list of '(package,src_dir,build_dir,filenames)' tuples.
Helper function for `finalize_options()`.
"""
data = []
if not self.packages:
return data
for package in self.packages:
# Locate package source directory
src_dir = self.get_package_dir(package)
# Compute package build directory
build_dir = os.path.join(*([self.build_lib] + package.split('.')))
# Length of path to strip from found files
plen = 0
if src_dir:
plen = len(src_dir)+1
# Strip directory from globbed filenames
filenames = [
file[plen:] for file in self.find_data_files(package, src_dir)
]
data.append((package, src_dir, build_dir, filenames))
return data
def find_data_files(self, package, src_dir):
"""Return filenames for package's data files in 'src_dir'.
Helper function for `get_data_files()`.
"""
globs = (self.package_data.get('', [])
+ self.package_data.get(package, []))
files = []
for pattern in globs:
# Each pattern has to be converted to a platform-specific path
filelist = glob(os.path.join(src_dir, convert_path(pattern)))
# Files that match more than one pattern are only added once
files.extend(fn for fn in filelist if fn not in files)
return files
def build_package_data(self):
"""Copy data files into build directory.
Helper function for `run()`.
"""
# FIXME add tests for this method
for package, src_dir, build_dir, filenames in self.data_files:
for filename in filenames:
target = os.path.join(build_dir, filename)
srcfile = os.path.join(src_dir, filename)
self.mkpath(os.path.dirname(target))
outf, copied = self.copy_file(srcfile,
target, preserve_mode=False)
                if copied and srcfile in self.distribution.convert_2to3_doctests:
self._doctests_2to3.append(outf)
# XXX - this should be moved to the Distribution class as it is not
# only needed for build_py. It also has no dependencies on this class.
def get_package_dir(self, package):
"""Return the directory, relative to the top of the source
distribution, where package 'package' should be found
(at least according to the 'package_dir' option, if any)."""
path = package.split('.')
if self.package_dir is not None:
path.insert(0, self.package_dir)
if len(path) > 0:
return os.path.join(*path)
return ''
def check_package(self, package, package_dir):
"""Helper function for `find_package_modules()` and `find_modules()'.
"""
# Empty dir name means current directory, which we can probably
# assume exists. Also, os.path.exists and isdir don't know about
# my "empty string means current dir" convention, so we have to
# circumvent them.
if package_dir != "":
if not os.path.exists(package_dir):
raise PackagingFileError(
"package directory '%s' does not exist" % package_dir)
if not os.path.isdir(package_dir):
raise PackagingFileError(
"supposed package directory '%s' exists, "
"but is not a directory" % package_dir)
# Require __init__.py for all but the "root package"
if package:
init_py = os.path.join(package_dir, "__init__.py")
if os.path.isfile(init_py):
return init_py
else:
logger.warning(("package init file '%s' not found " +
"(or not a regular file)"), init_py)
# Either not in a package at all (__init__.py not expected), or
# __init__.py doesn't exist -- so don't return the filename.
return None
def check_module(self, module, module_file):
if not os.path.isfile(module_file):
logger.warning("file %s (for module %s) not found",
module_file, module)
return False
else:
return True
def find_package_modules(self, package, package_dir):
self.check_package(package, package_dir)
module_files = glob(os.path.join(package_dir, "*.py"))
modules = []
if self.distribution.script_name is not None:
setup_script = os.path.abspath(self.distribution.script_name)
else:
setup_script = None
for f in module_files:
abs_f = os.path.abspath(f)
if abs_f != setup_script:
module = os.path.splitext(os.path.basename(f))[0]
modules.append((package, module, f))
else:
logger.debug("excluding %s", setup_script)
return modules
def find_modules(self):
"""Finds individually-specified Python modules, ie. those listed by
module name in 'self.py_modules'. Returns a list of tuples (package,
        module_base, filename): 'package' is the dotted name of the package
        containing the module (empty for top-level modules); 'module_base'
        is the bare (no packages, no dots) module name, and 'filename' is
        the path to the
".py" file (relative to the distribution root) that implements the
module.
"""
# Map package names to tuples of useful info about the package:
# (package_dir, checked)
# package_dir - the directory where we'll find source files for
# this package
# checked - true if we have checked that the package directory
# is valid (exists, contains __init__.py, ... ?)
packages = {}
# List of (package, module, filename) tuples to return
modules = []
# We treat modules-in-packages almost the same as toplevel modules,
# just the "package" for a toplevel is empty (either an empty
# string or empty list, depending on context). Differences:
# - don't check for __init__.py in directory for empty package
for module in self.py_modules:
path = module.split('.')
package = '.'.join(path[0:-1])
module_base = path[-1]
try:
package_dir, checked = packages[package]
except KeyError:
package_dir = self.get_package_dir(package)
checked = False
if not checked:
init_py = self.check_package(package, package_dir)
                packages[package] = (package_dir, True)
if init_py:
modules.append((package, "__init__", init_py))
# XXX perhaps we should also check for just .pyc files
# (so greedy closed-source bastards can distribute Python
# modules too)
module_file = os.path.join(package_dir, module_base + ".py")
if not self.check_module(module, module_file):
continue
modules.append((package, module_base, module_file))
return modules
def find_all_modules(self):
"""Compute the list of all modules that will be built, whether
they are specified one-module-at-a-time ('self.py_modules') or
by whole packages ('self.packages'). Return a list of tuples
(package, module, module_file), just like 'find_modules()' and
'find_package_modules()' do."""
modules = []
if self.py_modules:
modules.extend(self.find_modules())
if self.packages:
for package in self.packages:
package_dir = self.get_package_dir(package)
m = self.find_package_modules(package, package_dir)
modules.extend(m)
return modules
def get_source_files(self):
sources = [module[-1] for module in self.find_all_modules()]
sources += [
os.path.join(src_dir, filename)
for package, src_dir, build_dir, filenames in self.data_files
for filename in filenames]
return sources
def get_module_outfile(self, build_dir, package, module):
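        # e.g. get_module_outfile('build/lib', ['pkg'], 'mod') returns
        # 'build/lib/pkg/mod.py' (on POSIX)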
outfile_path = [build_dir] + list(package) + [module + ".py"]
return os.path.join(*outfile_path)
def get_outputs(self, include_bytecode=True):
modules = self.find_all_modules()
outputs = []
for package, module, module_file in modules:
package = package.split('.')
filename = self.get_module_outfile(self.build_lib, package, module)
outputs.append(filename)
if include_bytecode:
if self.compile:
outputs.append(filename + "c")
if self.optimize > 0:
outputs.append(filename + "o")
outputs += [
os.path.join(build_dir, filename)
for package, src_dir, build_dir, filenames in self.data_files
for filename in filenames]
return outputs
def build_module(self, module, module_file, package):
if isinstance(package, str):
package = package.split('.')
elif not isinstance(package, (list, tuple)):
raise TypeError(
"'package' must be a string (dot-separated), list, or tuple")
# Now put the module source file into the "build" area -- this is
# easy, we just copy it somewhere under self.build_lib (the build
# directory for Python source).
outfile = self.get_module_outfile(self.build_lib, package, module)
dir = os.path.dirname(outfile)
self.mkpath(dir)
return self.copy_file(module_file, outfile, preserve_mode=False)
def build_modules(self):
modules = self.find_modules()
for package, module, module_file in modules:
# Now "build" the module -- ie. copy the source file to
# self.build_lib (the build directory for Python source).
# (Actually, it gets copied to the directory for this package
# under self.build_lib.)
self.build_module(module, module_file, package)
def build_packages(self):
for package in self.packages:
# Get list of (package, module, module_file) tuples based on
# scanning the package directory. 'package' is only included
# in the tuple so that 'find_modules()' and
# 'find_package_tuples()' have a consistent interface; it's
# ignored here (apart from a sanity check). Also, 'module' is
# the *unqualified* module name (ie. no dots, no package -- we
# already know its package!), and 'module_file' is the path to
# the .py file, relative to the current directory
# (ie. including 'package_dir').
package_dir = self.get_package_dir(package)
modules = self.find_package_modules(package, package_dir)
# Now loop over the modules we found, "building" each one (just
# copy it to self.build_lib).
for package_, module, module_file in modules:
assert package == package_
self.build_module(module, module_file, package)
def byte_compile(self, files):
if hasattr(sys, 'dont_write_bytecode') and sys.dont_write_bytecode:
logger.warning('%s: byte-compiling is disabled, skipping.',
self.get_command_name())
return
from packaging.util import byte_compile
prefix = self.build_lib
if prefix[-1] != os.sep:
prefix = prefix + os.sep
        # XXX this code is essentially the same as the 'byte_compile()'
# method of the "install_lib" command, except for the determination
# of the 'prefix' string. Hmmm.
if self.compile:
byte_compile(files, optimize=0,
force=self.force, prefix=prefix, dry_run=self.dry_run)
if self.optimize > 0:
byte_compile(files, optimize=self.optimize,
force=self.force, prefix=prefix, dry_run=self.dry_run)

View file

@ -0,0 +1,132 @@
"""Build scripts (copy to build dir and fix up shebang line)."""
import os
import re
import sysconfig
from packaging.command.cmd import Command
from packaging.util import convert_path, newer
from packaging import logger
from packaging.compat import Mixin2to3
# check if Python is called on the first line with this expression
first_line_re = re.compile('^#!.*python[0-9.]*([ \t].*)?$')
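# e.g. it matches '#!/usr/bin/python', '#!/usr/bin/env python3.2' and
# '#!/usr/bin/python -u' (any interpreter options are captured in group 1)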
class build_scripts(Command, Mixin2to3):
description = "build scripts (copy and fix up shebang line)"
user_options = [
('build-dir=', 'd', "directory to build (copy) to"),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
('executable=', 'e', "specify final destination interpreter path"),
]
boolean_options = ['force']
def initialize_options(self):
self.build_dir = None
self.scripts = None
self.force = None
self.executable = None
self.outfiles = None
self.use_2to3 = False
self.convert_2to3_doctests = None
self.use_2to3_fixers = None
def finalize_options(self):
self.set_undefined_options('build',
('build_scripts', 'build_dir'),
'use_2to3', 'use_2to3_fixers',
'convert_2to3_doctests', 'force',
'executable')
self.scripts = self.distribution.scripts
def get_source_files(self):
return self.scripts
def run(self):
if not self.scripts:
return
copied_files = self.copy_scripts()
if self.use_2to3 and copied_files:
self._run_2to3(copied_files, fixers=self.use_2to3_fixers)
def copy_scripts(self):
"""Copy each script listed in 'self.scripts'; if it's marked as a
Python script in the Unix way (first line matches 'first_line_re',
        ie. starts with "#!" and contains "python"), then adjust the first
line to refer to the current Python interpreter as we copy.
"""
self.mkpath(self.build_dir)
outfiles = []
for script in self.scripts:
adjust = False
script = convert_path(script)
outfile = os.path.join(self.build_dir, os.path.basename(script))
outfiles.append(outfile)
if not self.force and not newer(script, outfile):
logger.debug("not copying %s (up-to-date)", script)
continue
# Always open the file, but ignore failures in dry-run mode --
# that way, we'll get accurate feedback if we can read the
# script.
try:
f = open(script, "r")
except IOError:
if not self.dry_run:
raise
f = None
else:
first_line = f.readline()
if not first_line:
logger.warning('%s: %s is an empty file (skipping)',
self.get_command_name(), script)
continue
match = first_line_re.match(first_line)
if match:
adjust = True
post_interp = match.group(1) or ''
if adjust:
logger.info("copying and adjusting %s -> %s", script,
self.build_dir)
if not self.dry_run:
outf = open(outfile, "w")
if not sysconfig.is_python_build():
outf.write("#!%s%s\n" %
(self.executable,
post_interp))
else:
outf.write("#!%s%s\n" %
(os.path.join(
sysconfig.get_config_var("BINDIR"),
"python%s%s" % (sysconfig.get_config_var("VERSION"),
sysconfig.get_config_var("EXE"))),
post_interp))
outf.writelines(f.readlines())
outf.close()
if f:
f.close()
else:
if f:
f.close()
self.copy_file(script, outfile)
if os.name == 'posix':
for file in outfiles:
if self.dry_run:
logger.info("changing mode of %s", file)
else:
oldmode = os.stat(file).st_mode & 0o7777
newmode = (oldmode | 0o555) & 0o7777
if newmode != oldmode:
logger.info("changing mode of %s from %o to %o",
file, oldmode, newmode)
os.chmod(file, newmode)
return outfiles

View file

@ -0,0 +1,88 @@
"""Check PEP compliance of metadata."""
from packaging import logger
from packaging.command.cmd import Command
from packaging.errors import PackagingSetupError
from packaging.util import resolve_name
class check(Command):
description = "check PEP compliance of metadata"
    user_options = [('metadata', 'm', 'verify metadata'),
                    ('all', 'a',
                     'run extended set of checks'),
                    ('strict', 's',
                     'exit with an error if a check fails')]
boolean_options = ['metadata', 'all', 'strict']
def initialize_options(self):
"""Sets default values for options."""
self.all = False
self.metadata = True
self.strict = False
self._warnings = []
def finalize_options(self):
pass
def warn(self, msg, *args):
"""Wrapper around logging that also remembers messages."""
# XXX we could use a special handler for this, but would need to test
# if it works even if the logger has a too high level
self._warnings.append((msg, args))
        return logger.warning(self.get_command_name() + ': ' + msg, *args)
def run(self):
"""Runs the command."""
# perform the various tests
if self.metadata:
self.check_metadata()
if self.all:
self.check_restructuredtext()
self.check_hooks_resolvable()
# let's raise an error in strict mode, if we have at least
# one warning
if self.strict and len(self._warnings) > 0:
msg = '\n'.join(msg % args for msg, args in self._warnings)
raise PackagingSetupError(msg)
def check_metadata(self):
"""Ensures that all required elements of metadata are supplied.
name, version, URL, author
Warns if any are missing.
"""
missing, warnings = self.distribution.metadata.check(strict=True)
if missing != []:
self.warn('missing required metadata: %s', ', '.join(missing))
for warning in warnings:
self.warn(warning)
def check_restructuredtext(self):
"""Checks if the long string fields are reST-compliant."""
missing, warnings = self.distribution.metadata.check(restructuredtext=True)
if self.distribution.metadata.docutils_support:
for warning in warnings:
line = warning[-1].get('line')
if line is None:
warning = warning[1]
else:
warning = '%s (line %s)' % (warning[1], line)
self.warn(warning)
elif self.strict:
raise PackagingSetupError('The docutils package is needed.')
def check_hooks_resolvable(self):
for options in self.distribution.command_options.values():
for hook_kind in ("pre_hook", "post_hook"):
                if hook_kind not in options:
                    continue
for hook_name in options[hook_kind][1].values():
try:
resolve_name(hook_name)
except ImportError:
self.warn('name %r cannot be resolved', hook_name)

View file

@ -0,0 +1,76 @@
"""Clean up temporary files created by the build command."""
# Contributed by Bastian Kleineidam <calvin@cs.uni-sb.de>
import os
from shutil import rmtree
from packaging.command.cmd import Command
from packaging import logger
class clean(Command):
description = "clean up temporary files from 'build' command"
user_options = [
('build-base=', 'b',
"base build directory (default: 'build.build-base')"),
('build-lib=', None,
"build directory for all modules (default: 'build.build-lib')"),
('build-temp=', 't',
"temporary build directory (default: 'build.build-temp')"),
('build-scripts=', None,
"build directory for scripts (default: 'build.build-scripts')"),
('bdist-base=', None,
"temporary directory for built distributions"),
('all', 'a',
"remove all build output, not just temporary by-products")
]
boolean_options = ['all']
def initialize_options(self):
self.build_base = None
self.build_lib = None
self.build_temp = None
self.build_scripts = None
self.bdist_base = None
self.all = None
def finalize_options(self):
self.set_undefined_options('build', 'build_base', 'build_lib',
'build_scripts', 'build_temp')
self.set_undefined_options('bdist', 'bdist_base')
def run(self):
# remove the build/temp.<plat> directory (unless it's already
# gone)
if os.path.exists(self.build_temp):
if self.dry_run:
logger.info('removing %s', self.build_temp)
else:
rmtree(self.build_temp)
else:
logger.debug("'%s' does not exist -- can't clean it",
self.build_temp)
if self.all:
# remove build directories
for directory in (self.build_lib,
self.bdist_base,
self.build_scripts):
if os.path.exists(directory):
if self.dry_run:
logger.info('removing %s', directory)
else:
rmtree(directory)
else:
logger.warning("'%s' does not exist -- can't clean it",
directory)
# just for the heck of it, try to remove the base build directory:
# we might have emptied it right now, but if not we don't care
if not self.dry_run:
try:
os.rmdir(self.build_base)
logger.info("removing '%s'", self.build_base)
except OSError:
pass

View file

@ -0,0 +1,440 @@
"""Base class for commands."""
import os
import re
from shutil import copyfile, move, make_archive
from packaging import util
from packaging import logger
from packaging.errors import PackagingOptionError
class Command:
"""Abstract base class for defining command classes, the "worker bees"
of the Packaging. A useful analogy for command classes is to think of
them as subroutines with local variables called "options". The options
are "declared" in 'initialize_options()' and "defined" (given their
final values, aka "finalized") in 'finalize_options()', both of which
must be defined by every command class. The distinction between the
two is necessary because option values might come from the outside
world (command line, config file, ...), and any options dependent on
other options must be computed *after* these outside influences have
been processed -- hence 'finalize_options()'. The "body" of the
subroutine, where it does all its work based on the values of its
options, is the 'run()' method, which must also be implemented by every
command class.
"""
# 'sub_commands' formalizes the notion of a "family" of commands,
# eg. "install_dist" as the parent with sub-commands "install_lib",
# "install_headers", etc. The parent of a family of commands
# defines 'sub_commands' as a class attribute; it's a list of
# (command_name : string, predicate : unbound_method | string | None)
# tuples, where 'predicate' is a method of the parent command that
# determines whether the corresponding command is applicable in the
    # current situation. (Eg. "install_headers" is only applicable if
# we have any C header files to install.) If 'predicate' is None,
# that command is always applicable.
#
# 'sub_commands' is usually defined at the *end* of a class, because
# predicates can be unbound methods, so they must already have been
# defined. The canonical example is the "install_dist" command.
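    # An illustrative declaration (not from this class):
    #   sub_commands = [('install_lib', None),
    #                   ('install_headers', has_headers)]
    # where 'has_headers' would be a predicate method of the parent command.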
sub_commands = []
# Pre and post command hooks are run just before or just after the command
# itself. They are simple functions that receive the command instance. They
# are specified as callable objects or dotted strings (for lazy loading).
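    # e.g. (illustrative) pre_hook = 'myproject.hooks.check_environment'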
pre_hook = None
post_hook = None
# -- Creation/initialization methods -------------------------------
def __init__(self, dist):
"""Create and initialize a new Command object. Most importantly,
invokes the 'initialize_options()' method, which is the real
initializer and depends on the actual command being instantiated.
"""
# late import because of mutual dependence between these classes
from packaging.dist import Distribution
if not isinstance(dist, Distribution):
raise TypeError("dist must be a Distribution instance")
if self.__class__ is Command:
raise RuntimeError("Command is an abstract class")
self.distribution = dist
self.initialize_options()
# Per-command versions of the global flags, so that the user can
        # customize Packaging's behaviour command-by-command and let some
# commands fall back on the Distribution's behaviour. None means
# "not defined, check self.distribution's copy", while 0 or 1 mean
# false and true (duh). Note that this means figuring out the real
# value of each flag is a touch complicated -- hence "self._dry_run"
# will be handled by a property, below.
# XXX This needs to be fixed. [I changed it to a property--does that
# "fix" it?]
self._dry_run = None
# Some commands define a 'self.force' option to ignore file
# timestamps, but methods defined *here* assume that
# 'self.force' exists for all commands. So define it here
# just to be safe.
self.force = None
# The 'help' flag is just used for command line parsing, so
# none of that complicated bureaucracy is needed.
self.help = False
# 'finalized' records whether or not 'finalize_options()' has been
# called. 'finalize_options()' itself should not pay attention to
# this flag: it is the business of 'ensure_finalized()', which
# always calls 'finalize_options()', to respect/update it.
self.finalized = False
# XXX A more explicit way to customize dry_run would be better.
@property
def dry_run(self):
if self._dry_run is None:
return getattr(self.distribution, 'dry_run')
else:
return self._dry_run
def ensure_finalized(self):
if not self.finalized:
self.finalize_options()
self.finalized = True
# Subclasses must define:
# initialize_options()
# provide default values for all options; may be customized by
# setup script, by options from config file(s), or by command-line
# options
# finalize_options()
# decide on the final values for all options; this is called
# after all possible intervention from the outside world
# (command line, option file, etc.) has been processed
# run()
# run the command: do whatever it is we're here to do,
# controlled by the command's various option values
def initialize_options(self):
"""Set default values for all the options that this command
supports. Note that these defaults may be overridden by other
commands, by the setup script, by config files, or by the
command line. Thus, this is not the place to code dependencies
between options; generally, 'initialize_options()' implementations
are just a bunch of "self.foo = None" assignments.
This method must be implemented by all command classes.
"""
raise RuntimeError(
"abstract method -- subclass %s must override" % self.__class__)
def finalize_options(self):
"""Set final values for all the options that this command supports.
This is always called as late as possible, ie. after any option
assignments from the command line or from other commands have been
done. Thus, this is the place to code option dependencies: if
'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as
long as 'foo' still has the same value it was assigned in
'initialize_options()'.
This method must be implemented by all command classes.
"""
raise RuntimeError(
"abstract method -- subclass %s must override" % self.__class__)
def dump_options(self, header=None, indent=""):
if header is None:
header = "command options for '%s':" % self.get_command_name()
logger.info(indent + header)
indent = indent + " "
negative_opt = getattr(self, 'negative_opt', ())
for option, _, _ in self.user_options:
if option in negative_opt:
continue
option = option.replace('-', '_')
if option[-1] == "=":
option = option[:-1]
value = getattr(self, option)
logger.info(indent + "%s = %s", option, value)
def run(self):
"""A command's raison d'etre: carry out the action it exists to
perform, controlled by the options initialized in
'initialize_options()', customized by other commands, the setup
script, the command line and config files, and finalized in
'finalize_options()'. All terminal output and filesystem
interaction should be done by 'run()'.
This method must be implemented by all command classes.
"""
raise RuntimeError(
"abstract method -- subclass %s must override" % self.__class__)
# -- External interface --------------------------------------------
# (called by outsiders)
def get_source_files(self):
"""Return the list of files that are used as inputs to this command,
i.e. the files used to generate the output files. The result is used
by the `sdist` command in determining the set of default files.
Command classes should implement this method if they operate on files
from the source tree.
"""
return []
def get_outputs(self):
"""Return the list of files that would be produced if this command
were actually run. Not affected by the "dry-run" flag or whether
any other commands have been run.
Command classes should implement this method if they produce any
output files that get consumed by another command. e.g., `build_ext`
returns the list of built extension modules, but not any temporary
files used in the compilation process.
"""
return []
# -- Option validation methods -------------------------------------
# (these are very handy in writing the 'finalize_options()' method)
#
# NB. the general philosophy here is to ensure that a particular option
# value meets certain type and value constraints. If not, we try to
# force it into conformance (eg. if we expect a list but have a string,
# split the string on comma and/or whitespace). If we can't force the
# option into conformance, raise PackagingOptionError. Thus, command
# classes need do nothing more than (eg.)
# self.ensure_string_list('foo')
# and they can be guaranteed that thereafter, self.foo will be
# a list of strings.
def _ensure_stringlike(self, option, what, default=None):
val = getattr(self, option)
if val is None:
setattr(self, option, default)
return default
elif not isinstance(val, str):
raise PackagingOptionError("'%s' must be a %s (got `%s`)" %
(option, what, val))
return val
def ensure_string(self, option, default=None):
"""Ensure that 'option' is a string; if not defined, set it to
'default'.
"""
self._ensure_stringlike(option, "string", default)
def ensure_string_list(self, option):
r"""Ensure that 'option' is a list of strings. If 'option' is
currently a string, we split it either on /,\s*/ or /\s+/, so
"foo bar baz", "foo,bar,baz", and "foo, bar baz" all become
["foo", "bar", "baz"].
"""
val = getattr(self, option)
if val is None:
return
elif isinstance(val, str):
setattr(self, option, re.split(r',\s*|\s+', val))
else:
if isinstance(val, list):
# checks if all elements are str
ok = True
for element in val:
if not isinstance(element, str):
ok = False
break
else:
ok = False
if not ok:
raise PackagingOptionError(
"'%s' must be a list of strings (got %r)" % (option, val))
def _ensure_tested_string(self, option, tester,
what, error_fmt, default=None):
val = self._ensure_stringlike(option, what, default)
if val is not None and not tester(val):
raise PackagingOptionError(
("error in '%s' option: " + error_fmt) % (option, val))
def ensure_filename(self, option):
"""Ensure that 'option' is the name of an existing file."""
self._ensure_tested_string(option, os.path.isfile,
"filename",
"'%s' does not exist or is not a file")
def ensure_dirname(self, option):
self._ensure_tested_string(option, os.path.isdir,
"directory name",
"'%s' does not exist or is not a directory")
# -- Convenience methods for commands ------------------------------
@classmethod
def get_command_name(cls):
if hasattr(cls, 'command_name'):
return cls.command_name
else:
return cls.__name__
def set_undefined_options(self, src_cmd, *options):
"""Set values of undefined options from another command.
Undefined options are options set to None, which is the convention
used to indicate that an option has not been changed between
'initialize_options()' and 'finalize_options()'. This method is
usually called from 'finalize_options()' for options that depend on
some other command rather than another option of the same command,
typically subcommands.
The 'src_cmd' argument is the other command from which option values
will be taken (a command object will be created for it if necessary);
the remaining positional arguments are strings that give the name of
the option to set. If the name is different on the source and target
command, you can pass a tuple with '(name_on_source, name_on_dest)' so
that 'self.name_on_dest' will be set from 'src_cmd.name_on_source'.
"""
src_cmd_obj = self.distribution.get_command_obj(src_cmd)
src_cmd_obj.ensure_finalized()
for obj in options:
if isinstance(obj, tuple):
src_option, dst_option = obj
else:
src_option, dst_option = obj, obj
if getattr(self, dst_option) is None:
setattr(self, dst_option,
getattr(src_cmd_obj, src_option))
def get_finalized_command(self, command, create=True):
"""Wrapper around Distribution's 'get_command_obj()' method: find
(create if necessary and 'create' is true) the command object for
'command', call its 'ensure_finalized()' method, and return the
finalized command object.
"""
cmd_obj = self.distribution.get_command_obj(command, create)
cmd_obj.ensure_finalized()
return cmd_obj
def get_reinitialized_command(self, command, reinit_subcommands=False):
return self.distribution.get_reinitialized_command(
command, reinit_subcommands)
def run_command(self, command):
"""Run some other command: uses the 'run_command()' method of
Distribution, which creates and finalizes the command object if
necessary and then invokes its 'run()' method.
"""
self.distribution.run_command(command)
def get_sub_commands(self):
"""Determine the sub-commands that are relevant in the current
distribution (ie., that need to be run). This is based on the
'sub_commands' class attribute: each tuple in that list may include
a method that we call to determine if the subcommand needs to be
run for the current distribution. Return a list of command names.
"""
commands = []
for sub_command in self.sub_commands:
if len(sub_command) == 2:
cmd_name, method = sub_command
if method is None or method(self):
commands.append(cmd_name)
else:
commands.append(sub_command)
return commands
# -- External world manipulation -----------------------------------
def execute(self, func, args, msg=None, level=1):
util.execute(func, args, msg, dry_run=self.dry_run)
def mkpath(self, name, mode=0o777, dry_run=None, verbose=0):
if dry_run is None:
dry_run = self.dry_run
name = os.path.normpath(name)
if os.path.isdir(name) or name == '':
return
if dry_run:
head = ''
for part in name.split(os.sep):
logger.info("created directory %s%s", head, part)
head += part + os.sep
return
os.makedirs(name, mode)
def copy_file(self, infile, outfile,
preserve_mode=True, preserve_times=True, link=None, level=1):
"""Copy a file respecting verbose, dry-run and force flags. (The
former two default to whatever is in the Distribution object, and
the latter defaults to false for commands that don't define it.)"""
        if os.path.isdir(outfile):
            outfile = os.path.join(outfile, os.path.split(infile)[-1])
        if self.dry_run:
            # pretend the copy took place so that callers indexing the
            # result (e.g. self.copy_file(src, dir)[0]) keep working
            return outfile, None
        copyfile(infile, outfile)
        return outfile, None  # XXX
def copy_tree(self, infile, outfile, preserve_mode=True,
preserve_times=True, preserve_symlinks=False, level=1):
"""Copy an entire directory tree respecting verbose, dry-run,
and force flags.
"""
if self.dry_run:
return # see if we want to display something
return util.copy_tree(infile, outfile, preserve_mode, preserve_times,
preserve_symlinks, not self.force, dry_run=self.dry_run)
def move_file(self, src, dst, level=1):
"""Move a file respecting the dry-run flag."""
if self.dry_run:
return # XXX log ?
return move(src, dst)
def spawn(self, cmd, search_path=True, level=1):
"""Spawn an external command respecting dry-run flag."""
from packaging.util import spawn
spawn(cmd, search_path, dry_run=self.dry_run)
def make_archive(self, base_name, format, root_dir=None, base_dir=None,
owner=None, group=None):
return make_archive(base_name, format, root_dir,
base_dir, dry_run=self.dry_run,
owner=owner, group=group)
def make_file(self, infiles, outfile, func, args,
exec_msg=None, skip_msg=None, level=1):
"""Special case of 'execute()' for operations that process one or
more input files and generate one output file. Works just like
'execute()', except the operation is skipped and a different
message printed if 'outfile' already exists and is newer than all
files listed in 'infiles'. If the command defined 'self.force',
and it is true, then the command is unconditionally run -- does no
timestamp checks.
"""
if skip_msg is None:
skip_msg = "skipping %s (inputs unchanged)" % outfile
# Allow 'infiles' to be a single string
if isinstance(infiles, str):
infiles = (infiles,)
elif not isinstance(infiles, (list, tuple)):
raise TypeError(
"'infiles' must be a string, or a list or tuple of strings")
if exec_msg is None:
exec_msg = "generating %s from %s" % (outfile, ', '.join(infiles))
# If 'outfile' must be regenerated (either because it doesn't
# exist, is out-of-date, or the 'force' flag is true) then
# perform the action that presumably regenerates it
if self.force or util.newer_group(infiles, outfile):
self.execute(func, args, exec_msg, level)
# Otherwise, print the "skip" message
else:
logger.debug(skip_msg)

View file

@ -0,0 +1,35 @@
"""Do X and Y."""
from packaging import logger
from packaging.command.cmd import Command
class x(Command):
# Brief (40-50 characters) description of the command
description = ""
# List of option tuples: long name, short name (None if no short
# name), and help string.
user_options = [
('', '', # long option, short option (one letter) or None
""), # help text
]
def initialize_options(self):
        # initialize each of the command's options to None here
        # (placeholder attribute shown)
        self.x = None
def finalize_options(self):
if self.x is None:
self.x = ...
def run(self):
...
logger.info(...)
if not self.dry_run:
...
self.execute(..., dry_run=self.dry_run)

View file

@ -0,0 +1,351 @@
"""Prepare the build.
This module provides config, a (mostly) empty command class
that exists mainly to be sub-classed by specific module distributions and
applications. The idea is that while every "config" command is different,
at least they're all named the same, and users always see "config" in the
list of standard commands. Also, this is a good place to put common
configure-like tasks: "try to compile this C code", or "figure out where
this header file lives".
"""
import os
import re
from packaging.command.cmd import Command
from packaging.errors import PackagingExecError
from packaging.compiler import customize_compiler
from packaging import logger
LANG_EXT = {'c': '.c', 'c++': '.cxx'}
class config(Command):
description = "prepare the build"
user_options = [
('compiler=', None,
"specify the compiler type"),
('cc=', None,
"specify the compiler executable"),
('include-dirs=', 'I',
"list of directories to search for header files"),
('define=', 'D',
"C preprocessor macros to define"),
('undef=', 'U',
"C preprocessor macros to undefine"),
('libraries=', 'l',
"external C libraries to link with"),
('library-dirs=', 'L',
"directories to search for external C libraries"),
('noisy', None,
"show every action (compile, link, run, ...) taken"),
('dump-source', None,
"dump generated source files before attempting to compile them"),
]
# The three standard command methods: since the "config" command
# does nothing by default, these are empty.
def initialize_options(self):
self.compiler = None
self.cc = None
self.include_dirs = None
self.libraries = None
self.library_dirs = None
# maximal output for now
self.noisy = True
self.dump_source = True
# list of temporary files generated along-the-way that we have
# to clean at some point
self.temp_files = []
def finalize_options(self):
if self.include_dirs is None:
self.include_dirs = self.distribution.include_dirs or []
elif isinstance(self.include_dirs, str):
self.include_dirs = self.include_dirs.split(os.pathsep)
if self.libraries is None:
self.libraries = []
elif isinstance(self.libraries, str):
self.libraries = [self.libraries]
if self.library_dirs is None:
self.library_dirs = []
elif isinstance(self.library_dirs, str):
self.library_dirs = self.library_dirs.split(os.pathsep)
def run(self):
pass
# Utility methods for actual "config" commands. The interfaces are
# loosely based on Autoconf macros of similar names. Sub-classes
# may use these freely.
def _check_compiler(self):
"""Check that 'self.compiler' really is a CCompiler object;
if not, make it one.
"""
# We do this late, and only on-demand, because this is an expensive
# import.
from packaging.compiler.ccompiler import CCompiler
from packaging.compiler import new_compiler
if not isinstance(self.compiler, CCompiler):
self.compiler = new_compiler(compiler=self.compiler,
dry_run=self.dry_run, force=True)
customize_compiler(self.compiler)
if self.include_dirs:
self.compiler.set_include_dirs(self.include_dirs)
if self.libraries:
self.compiler.set_libraries(self.libraries)
if self.library_dirs:
self.compiler.set_library_dirs(self.library_dirs)
    def _gen_temp_sourcefile(self, body, headers, lang):
        filename = "_configtest" + LANG_EXT[lang]
        with open(filename, "w") as file:
            if headers:
                for header in headers:
                    file.write("#include <%s>\n" % header)
                file.write("\n")
            if body:
                # 'body' may be None (see try_cpp and search_cpp)
                file.write(body)
                if body[-1] != "\n":
                    file.write("\n")
        return filename
def _preprocess(self, body, headers, include_dirs, lang):
src = self._gen_temp_sourcefile(body, headers, lang)
out = "_configtest.i"
self.temp_files.extend((src, out))
self.compiler.preprocess(src, out, include_dirs=include_dirs)
return src, out
def _compile(self, body, headers, include_dirs, lang):
src = self._gen_temp_sourcefile(body, headers, lang)
if self.dump_source:
dump_file(src, "compiling '%s':" % src)
obj = self.compiler.object_filenames([src])[0]
self.temp_files.extend((src, obj))
self.compiler.compile([src], include_dirs=include_dirs)
return src, obj
def _link(self, body, headers, include_dirs, libraries, library_dirs,
lang):
src, obj = self._compile(body, headers, include_dirs, lang)
prog = os.path.splitext(os.path.basename(src))[0]
self.compiler.link_executable([obj], prog,
libraries=libraries,
library_dirs=library_dirs,
target_lang=lang)
if self.compiler.exe_extension is not None:
prog = prog + self.compiler.exe_extension
self.temp_files.append(prog)
return src, obj, prog
def _clean(self, *filenames):
if not filenames:
filenames = self.temp_files
self.temp_files = []
logger.info("removing: %s", ' '.join(filenames))
for filename in filenames:
try:
os.remove(filename)
except OSError:
pass
# XXX these ignore the dry-run flag: what to do, what to do? even if
# you want a dry-run build, you still need some sort of configuration
# info. My inclination is to make it up to the real config command to
# consult 'dry_run', and assume a default (minimal) configuration if
# true. The problem with trying to do it here is that you'd have to
# return either true or false from all the 'try' methods, neither of
# which is correct.
# XXX need access to the header search path and maybe default macros.
def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"):
"""Construct a source file from 'body' (a string containing lines
of C/C++ code) and 'headers' (a list of header files to include)
and run it through the preprocessor. Return true if the
preprocessor succeeded, false if there were any errors.
('body' probably isn't of much use, but what the heck.)
"""
from packaging.compiler.ccompiler import CompileError
self._check_compiler()
ok = True
try:
self._preprocess(body, headers, include_dirs, lang)
except CompileError:
ok = False
self._clean()
return ok
def search_cpp(self, pattern, body=None, headers=None, include_dirs=None,
lang="c"):
"""Construct a source file (just like 'try_cpp()'), run it through
the preprocessor, and return true if any line of the output matches
'pattern'. 'pattern' should either be a compiled regex object or a
string containing a regex. If both 'body' and 'headers' are None,
preprocesses an empty file -- which can be useful to determine the
symbols the preprocessor and compiler set by default.
"""
self._check_compiler()
src, out = self._preprocess(body, headers, include_dirs, lang)
if isinstance(pattern, str):
pattern = re.compile(pattern)
        match = False
        with open(out) as file:
            for line in file:
                if pattern.search(line):
                    match = True
                    break
self._clean()
return match
def try_compile(self, body, headers=None, include_dirs=None, lang="c"):
"""Try to compile a source file built from 'body' and 'headers'.
Return true on success, false otherwise.
"""
from packaging.compiler.ccompiler import CompileError
self._check_compiler()
try:
self._compile(body, headers, include_dirs, lang)
ok = True
except CompileError:
ok = False
logger.info(ok and "success!" or "failure.")
self._clean()
return ok
def try_link(self, body, headers=None, include_dirs=None, libraries=None,
library_dirs=None, lang="c"):
"""Try to compile and link a source file, built from 'body' and
'headers', to executable form. Return true on success, false
otherwise.
"""
from packaging.compiler.ccompiler import CompileError, LinkError
self._check_compiler()
try:
self._link(body, headers, include_dirs,
libraries, library_dirs, lang)
ok = True
except (CompileError, LinkError):
ok = False
logger.info(ok and "success!" or "failure.")
self._clean()
return ok
def try_run(self, body, headers=None, include_dirs=None, libraries=None,
library_dirs=None, lang="c"):
"""Try to compile, link to an executable, and run a program
built from 'body' and 'headers'. Return true on success, false
otherwise.
"""
from packaging.compiler.ccompiler import CompileError, LinkError
self._check_compiler()
try:
src, obj, exe = self._link(body, headers, include_dirs,
libraries, library_dirs, lang)
self.spawn([exe])
ok = True
except (CompileError, LinkError, PackagingExecError):
ok = False
logger.info(ok and "success!" or "failure.")
self._clean()
return ok
# -- High-level methods --------------------------------------------
# (these are the ones that are actually likely to be useful
# when implementing a real-world config command!)
def check_func(self, func, headers=None, include_dirs=None,
libraries=None, library_dirs=None, decl=False, call=False):
"""Determine if function 'func' is available by constructing a
source file that refers to 'func', and compiles and links it.
If everything succeeds, returns true; otherwise returns false.
The constructed source file starts out by including the header
files listed in 'headers'. If 'decl' is true, it then declares
'func' (as "int func()"); you probably shouldn't supply 'headers'
and set 'decl' true in the same call, or you might get errors about
        conflicting declarations for 'func'. Finally, the constructed
'main()' function either references 'func' or (if 'call' is true)
calls it. 'libraries' and 'library_dirs' are used when
linking.
"""
self._check_compiler()
body = []
if decl:
body.append("int %s ();" % func)
body.append("int main () {")
if call:
body.append(" %s();" % func)
else:
body.append(" %s;" % func)
body.append("}")
body = "\n".join(body) + "\n"
return self.try_link(body, headers, include_dirs,
libraries, library_dirs)
def check_lib(self, library, library_dirs=None, headers=None,
include_dirs=None, other_libraries=[]):
"""Determine if 'library' is available to be linked against,
without actually checking that any particular symbols are provided
by it. 'headers' will be used in constructing the source file to
be compiled, but the only effect of this is to check if all the
header files listed are available. Any libraries listed in
'other_libraries' will be included in the link, in case 'library'
has symbols that depend on other libraries.
"""
self._check_compiler()
return self.try_link("int main (void) { }",
headers, include_dirs,
[library]+other_libraries, library_dirs)
def check_header(self, header, include_dirs=None, library_dirs=None,
lang="c"):
"""Determine if the system header file named by 'header_file'
exists and can be found by the preprocessor; return true if so,
false otherwise.
"""
return self.try_cpp(body="/* No body */", headers=[header],
include_dirs=include_dirs)
def dump_file(filename, head=None):
"""Dumps a file content into log.info.
If head is not None, will be dumped before the file content.
"""
if head is None:
logger.info(filename)
else:
logger.info(head)
with open(filename) as file:
logger.info(file.read())

View file

@ -0,0 +1,79 @@
"""Install platform-independent data files."""
# Contributed by Bastian Kleineidam
import os
from shutil import Error
from sysconfig import get_paths, format_value
from packaging import logger
from packaging.util import convert_path
from packaging.command.cmd import Command
class install_data(Command):
description = "install platform-independent data files"
user_options = [
('install-dir=', 'd',
"base directory for installing data files "
"(default: installation base dir)"),
('root=', None,
"install everything relative to this alternate root directory"),
('force', 'f', "force installation (overwrite existing files)"),
]
boolean_options = ['force']
def initialize_options(self):
self.install_dir = None
self.outfiles = []
self.data_files_out = []
self.root = None
self.force = False
self.data_files = self.distribution.data_files
self.warn_dir = True
def finalize_options(self):
self.set_undefined_options('install_dist',
('install_data', 'install_dir'),
'root', 'force')
def run(self):
self.mkpath(self.install_dir)
        for source, spec in self.data_files.items():
            destination = convert_path(self.expand_categories(spec))
            dir_dest = os.path.abspath(os.path.dirname(destination))
            self.mkpath(dir_dest)
            try:
                out = self.copy_file(source, dir_dest)[0]
            except Error as e:
                logger.warning('%s: %s', self.get_command_name(), e)
                out = destination
            self.outfiles.append(out)
            self.data_files_out.append((source, destination))
def expand_categories(self, path_with_categories):
local_vars = get_paths()
local_vars['distribution.name'] = self.distribution.metadata['Name']
expanded_path = format_value(path_with_categories, local_vars)
expanded_path = format_value(expanded_path, local_vars)
if '{' in expanded_path and '}' in expanded_path:
logger.warning(
'%s: unable to expand %s, some categories may be missing',
self.get_command_name(), path_with_categories)
return expanded_path
def get_source_files(self):
return list(self.data_files)
def get_inputs(self):
return list(self.data_files)
def get_outputs(self):
return self.outfiles
def get_resources_out(self):
return self.data_files_out

View file

@ -0,0 +1,625 @@
"""Main install command, which calls the other install_* commands."""
import sys
import os
import sysconfig
from sysconfig import get_config_vars, get_paths, get_path, get_config_var
from packaging import logger
from packaging.command.cmd import Command
from packaging.errors import PackagingPlatformError
from packaging.util import write_file
from packaging.util import convert_path, change_root, get_platform
from packaging.errors import PackagingOptionError
HAS_USER_SITE = True
class install_dist(Command):
description = "install everything from build directory"
user_options = [
# Select installation scheme and set base director(y|ies)
('prefix=', None,
"installation prefix"),
('exec-prefix=', None,
"(Unix only) prefix for platform-specific files"),
('home=', None,
"(Unix only) home directory to install under"),
# Or just set the base director(y|ies)
('install-base=', None,
"base installation directory (instead of --prefix or --home)"),
('install-platbase=', None,
"base installation directory for platform-specific files " +
"(instead of --exec-prefix or --home)"),
('root=', None,
"install everything relative to this alternate root directory"),
# Or explicitly set the installation scheme
('install-purelib=', None,
"installation directory for pure Python module distributions"),
('install-platlib=', None,
"installation directory for non-pure module distributions"),
('install-lib=', None,
"installation directory for all module distributions " +
"(overrides --install-purelib and --install-platlib)"),
('install-headers=', None,
"installation directory for C/C++ headers"),
('install-scripts=', None,
"installation directory for Python scripts"),
('install-data=', None,
"installation directory for data files"),
# Byte-compilation options -- see install_lib.py for details, as
# these are duplicated from there (but only install_lib does
# anything with them).
('compile', 'c', "compile .py to .pyc [default]"),
('no-compile', None, "don't compile .py files"),
('optimize=', 'O',
'also compile with optimization: -O1 for "python -O", '
'-O2 for "python -OO", and -O0 to disable [default: -O0]'),
# Miscellaneous control options
('force', 'f',
"force installation (overwrite any existing files)"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
# Where to install documentation (eventually!)
#('doc-format=', None, "format of documentation to generate"),
#('install-man=', None, "directory for Unix man pages"),
#('install-html=', None, "directory for HTML documentation"),
#('install-info=', None, "directory for GNU info files"),
# XXX use a name that makes clear this is the old format
('record=', None,
"filename in which to record a list of installed files "
"(not PEP 376-compliant)"),
('resources=', None,
"data files mapping"),
# .dist-info related arguments, read by install_dist_info
('no-distinfo', None,
"do not create a .dist-info directory"),
('installer=', None,
"the name of the installer"),
('requested', None,
"generate a REQUESTED file (i.e."),
('no-requested', None,
"do not generate a REQUESTED file"),
('no-record', None,
"do not generate a RECORD file"),
]
boolean_options = ['compile', 'force', 'skip-build', 'no-distinfo',
'requested', 'no-record']
if HAS_USER_SITE:
user_options.append(
('user', None,
"install in user site-packages directory [%s]" %
get_path('purelib', '%s_user' % os.name)))
boolean_options.append('user')
negative_opt = {'no-compile': 'compile', 'no-requested': 'requested'}
def initialize_options(self):
# High-level options: these select both an installation base
# and scheme.
self.prefix = None
self.exec_prefix = None
self.home = None
if HAS_USER_SITE:
self.user = False
# These select only the installation base; it's up to the user to
# specify the installation scheme (currently, that means supplying
# the --install-{platlib,purelib,scripts,data} options).
self.install_base = None
self.install_platbase = None
self.root = None
# These options are the actual installation directories; if not
# supplied by the user, they are filled in using the installation
# scheme implied by prefix/exec-prefix/home and the contents of
# that installation scheme.
self.install_purelib = None # for pure module distributions
self.install_platlib = None # non-pure (dists w/ extensions)
self.install_headers = None # for C/C++ headers
self.install_lib = None # set to either purelib or platlib
self.install_scripts = None
self.install_data = None
if HAS_USER_SITE:
self.install_userbase = get_config_var('userbase')
self.install_usersite = get_path('purelib', '%s_user' % os.name)
self.compile = None
self.optimize = None
# These two are for putting non-packagized distributions into their
# own directory and creating a .pth file if it makes sense.
# 'extra_path' comes from the setup file; 'install_path_file' can
# be turned off if it makes no sense to install a .pth file. (But
# better to install it uselessly than to guess wrong and not
# install it when it's necessary and would be used!) Currently,
# 'install_path_file' is always true unless some outsider meddles
# with it.
self.extra_path = None
self.install_path_file = True
# 'force' forces installation, even if target files are not
# out-of-date. 'skip_build' skips running the "build" command,
# handy if you know it's not necessary. 'warn_dir' (which is *not*
# a user option, it's just there so the bdist_* commands can turn
# it off) determines whether we warn about installing to a
# directory not in sys.path.
self.force = False
self.skip_build = False
self.warn_dir = True
# These are only here as a conduit from the 'build' command to the
# 'install_*' commands that do the real work. ('build_base' isn't
# actually used anywhere, but it might be useful in future.) They
# are not user options, because if the user told the install
# command where the build directory is, that wouldn't affect the
# build command.
self.build_base = None
self.build_lib = None
# Not defined yet because we don't know anything about
# documentation yet.
#self.install_man = None
#self.install_html = None
#self.install_info = None
self.record = None
self.resources = None
# .dist-info related options
self.no_distinfo = None
self.installer = None
self.requested = None
self.no_record = None
self.no_resources = None
# -- Option finalizing methods -------------------------------------
# (This is rather more involved than for most commands,
# because this is where the policy for installing third-
# party Python modules on various platforms given a wide
# array of user input is decided. Yes, it's quite complex!)
def finalize_options(self):
        # This method (and its helpers, like 'finalize_unix()',
        # 'finalize_other()', and 'select_scheme()') is where the default
        # installation directories for modules, extension modules, and
        # anything else we care to install from a Python module
        # distribution are determined.  Thus, this code makes a pretty
        # important policy
# statement about how third-party stuff is added to a Python
# installation! Note that the actual work of installation is done
# by the relatively simple 'install_*' commands; they just take
# their orders from the installation directory options determined
# here.
# Check for errors/inconsistencies in the options; first, stuff
# that's wrong on any platform.
if ((self.prefix or self.exec_prefix or self.home) and
(self.install_base or self.install_platbase)):
raise PackagingOptionError(
"must supply either prefix/exec-prefix/home or "
"install-base/install-platbase -- not both")
if self.home and (self.prefix or self.exec_prefix):
raise PackagingOptionError(
"must supply either home or prefix/exec-prefix -- not both")
if HAS_USER_SITE and self.user and (
self.prefix or self.exec_prefix or self.home or
self.install_base or self.install_platbase):
raise PackagingOptionError(
"can't combine user with prefix/exec_prefix/home or "
"install_base/install_platbase")
# Next, stuff that's wrong (or dubious) only on certain platforms.
if os.name != "posix":
if self.exec_prefix:
logger.warning(
'%s: exec-prefix option ignored on this platform',
self.get_command_name())
self.exec_prefix = None
# Now the interesting logic -- so interesting that we farm it out
# to other methods. The goal of these methods is to set the final
# values for the install_{lib,scripts,data,...} options, using as
# input a heady brew of prefix, exec_prefix, home, install_base,
# install_platbase, user-supplied versions of
# install_{purelib,platlib,lib,scripts,data,...}, and the
# INSTALL_SCHEME dictionary above. Phew!
self.dump_dirs("pre-finalize_{unix,other}")
if os.name == 'posix':
self.finalize_unix()
else:
self.finalize_other()
self.dump_dirs("post-finalize_{unix,other}()")
# Expand configuration variables, tilde, etc. in self.install_base
# and self.install_platbase -- that way, we can use $base or
# $platbase in the other installation directories and not worry
# about needing recursive variable expansion (shudder).
py_version = sys.version.split()[0]
prefix, exec_prefix, srcdir, projectbase = get_config_vars(
'prefix', 'exec_prefix', 'srcdir', 'projectbase')
metadata = self.distribution.metadata
self.config_vars = {
'dist_name': metadata['Name'],
'dist_version': metadata['Version'],
'dist_fullname': metadata.get_fullname(),
'py_version': py_version,
'py_version_short': py_version[:3],
'py_version_nodot': py_version[:3:2],
'sys_prefix': prefix,
'prefix': prefix,
'sys_exec_prefix': exec_prefix,
'exec_prefix': exec_prefix,
'srcdir': srcdir,
'projectbase': projectbase,
}
if HAS_USER_SITE:
self.config_vars['userbase'] = self.install_userbase
self.config_vars['usersite'] = self.install_usersite
self.expand_basedirs()
self.dump_dirs("post-expand_basedirs()")
# Now define config vars for the base directories so we can expand
# everything else.
self.config_vars['base'] = self.install_base
self.config_vars['platbase'] = self.install_platbase
# Expand "~" and configuration variables in the installation
# directories.
self.expand_dirs()
self.dump_dirs("post-expand_dirs()")
# Create directories in the home dir:
if HAS_USER_SITE and self.user:
self.create_home_path()
# Pick the actual directory to install all modules to: either
# install_purelib or install_platlib, depending on whether this
# module distribution is pure or not. Of course, if the user
# already specified install_lib, use their selection.
if self.install_lib is None:
if self.distribution.ext_modules: # has extensions: non-pure
self.install_lib = self.install_platlib
else:
self.install_lib = self.install_purelib
# Convert directories from Unix /-separated syntax to the local
# convention.
self.convert_paths('lib', 'purelib', 'platlib',
'scripts', 'data', 'headers')
if HAS_USER_SITE:
self.convert_paths('userbase', 'usersite')
# Well, we're not actually fully completely finalized yet: we still
# have to deal with 'extra_path', which is the hack for allowing
# non-packagized module distributions (hello, Numerical Python!) to
# get their own directories.
self.handle_extra_path()
self.install_libbase = self.install_lib # needed for .pth file
self.install_lib = os.path.join(self.install_lib, self.extra_dirs)
# If a new root directory was supplied, make all the installation
# dirs relative to it.
if self.root is not None:
self.change_roots('libbase', 'lib', 'purelib', 'platlib',
'scripts', 'data', 'headers')
self.dump_dirs("after prepending root")
# Find out the build directories, ie. where to install from.
self.set_undefined_options('build', 'build_base', 'build_lib')
# Punt on doc directories for now -- after all, we're punting on
# documentation completely!
if self.no_distinfo is None:
self.no_distinfo = False
def finalize_unix(self):
"""Finalize options for posix platforms."""
if self.install_base is not None or self.install_platbase is not None:
if ((self.install_lib is None and
self.install_purelib is None and
self.install_platlib is None) or
self.install_headers is None or
self.install_scripts is None or
self.install_data is None):
raise PackagingOptionError(
"install-base or install-platbase supplied, but "
"installation scheme is incomplete")
return
if HAS_USER_SITE and self.user:
if self.install_userbase is None:
raise PackagingPlatformError(
"user base directory is not specified")
self.install_base = self.install_platbase = self.install_userbase
self.select_scheme("posix_user")
elif self.home is not None:
self.install_base = self.install_platbase = self.home
self.select_scheme("posix_home")
else:
if self.prefix is None:
if self.exec_prefix is not None:
raise PackagingOptionError(
"must not supply exec-prefix without prefix")
self.prefix = os.path.normpath(sys.prefix)
self.exec_prefix = os.path.normpath(sys.exec_prefix)
else:
if self.exec_prefix is None:
self.exec_prefix = self.prefix
self.install_base = self.prefix
self.install_platbase = self.exec_prefix
self.select_scheme("posix_prefix")
def finalize_other(self):
"""Finalize options for non-posix platforms"""
if HAS_USER_SITE and self.user:
if self.install_userbase is None:
raise PackagingPlatformError(
"user base directory is not specified")
self.install_base = self.install_platbase = self.install_userbase
self.select_scheme(os.name + "_user")
elif self.home is not None:
self.install_base = self.install_platbase = self.home
self.select_scheme("posix_home")
else:
if self.prefix is None:
self.prefix = os.path.normpath(sys.prefix)
self.install_base = self.install_platbase = self.prefix
try:
self.select_scheme(os.name)
except KeyError:
raise PackagingPlatformError(
"no support for installation on '%s'" % os.name)
def dump_dirs(self, msg):
"""Dump the list of user options."""
logger.debug(msg + ":")
for opt in self.user_options:
opt_name = opt[0]
if opt_name[-1] == "=":
opt_name = opt_name[0:-1]
if opt_name in self.negative_opt:
opt_name = self.negative_opt[opt_name]
opt_name = opt_name.replace('-', '_')
val = not getattr(self, opt_name)
else:
opt_name = opt_name.replace('-', '_')
val = getattr(self, opt_name)
logger.debug(" %s: %s", opt_name, val)
def select_scheme(self, name):
"""Set the install directories by applying the install schemes."""
# it's the caller's problem if they supply a bad name!
scheme = get_paths(name, expand=False)
for key, value in scheme.items():
if key == 'platinclude':
key = 'headers'
value = os.path.join(value, self.distribution.metadata['Name'])
attrname = 'install_' + key
if hasattr(self, attrname):
if getattr(self, attrname) is None:
setattr(self, attrname, value)
def _expand_attrs(self, attrs):
for attr in attrs:
val = getattr(self, attr)
if val is not None:
if os.name == 'posix' or os.name == 'nt':
val = os.path.expanduser(val)
# see if we want to push this work in sysconfig XXX
val = sysconfig._subst_vars(val, self.config_vars)
setattr(self, attr, val)
def expand_basedirs(self):
"""Call `os.path.expanduser` on install_{base,platbase} and root."""
self._expand_attrs(['install_base', 'install_platbase', 'root'])
def expand_dirs(self):
"""Call `os.path.expanduser` on install dirs."""
self._expand_attrs(['install_purelib', 'install_platlib',
'install_lib', 'install_headers',
'install_scripts', 'install_data'])
def convert_paths(self, *names):
"""Call `convert_path` over `names`."""
for name in names:
attr = "install_" + name
setattr(self, attr, convert_path(getattr(self, attr)))
def handle_extra_path(self):
"""Set `path_file` and `extra_dirs` using `extra_path`."""
if self.extra_path is None:
self.extra_path = self.distribution.extra_path
if self.extra_path is not None:
if isinstance(self.extra_path, str):
self.extra_path = self.extra_path.split(',')
if len(self.extra_path) == 1:
path_file = extra_dirs = self.extra_path[0]
elif len(self.extra_path) == 2:
path_file, extra_dirs = self.extra_path
else:
raise PackagingOptionError(
"'extra_path' option must be a list, tuple, or "
"comma-separated string with 1 or 2 elements")
# convert to local form in case Unix notation used (as it
# should be in setup scripts)
extra_dirs = convert_path(extra_dirs)
else:
path_file = None
extra_dirs = ''
# XXX should we warn if path_file and not extra_dirs? (in which
# case the path file would be harmless but pointless)
self.path_file = path_file
self.extra_dirs = extra_dirs
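        # Examples, derived from the parsing above (hypothetical values):
        #   extra_path = 'Numeric'        -> path_file = extra_dirs = 'Numeric'
        #   extra_path = 'Numeric,Numlib' -> path_file = 'Numeric',
        #                                    extra_dirs = 'Numlib'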
def change_roots(self, *names):
"""Change the install direcories pointed by name using root."""
for name in names:
attr = "install_" + name
setattr(self, attr, change_root(self.root, getattr(self, attr)))
def create_home_path(self):
"""Create directories under ~."""
if HAS_USER_SITE and not self.user:
return
home = convert_path(os.path.expanduser("~"))
for name, path in self.config_vars.items():
if path.startswith(home) and not os.path.isdir(path):
os.makedirs(path, 0o700)
# -- Command execution methods -------------------------------------
def run(self):
"""Runs the command."""
# Obviously have to build before we can install
if not self.skip_build:
self.run_command('build')
# If we built for any other platform, we can't install.
build_plat = self.distribution.get_command_obj('build').plat_name
# check warn_dir - it is a clue that the 'install_dist' is happening
# internally, and not to sys.path, so we don't check the platform
# matches what we are running.
if self.warn_dir and build_plat != get_platform():
raise PackagingPlatformError("Can't install when "
"cross-compiling")
# Run all sub-commands (at least those that need to be run)
for cmd_name in self.get_sub_commands():
self.run_command(cmd_name)
if self.path_file:
self.create_path_file()
# write list of installed files, if requested.
if self.record:
outputs = self.get_outputs()
if self.root: # strip any package prefix
root_len = len(self.root)
for counter in range(len(outputs)):
outputs[counter] = outputs[counter][root_len:]
self.execute(write_file,
(self.record, outputs),
"writing list of installed files to '%s'" %
self.record)
normpath, normcase = os.path.normpath, os.path.normcase
sys_path = [normcase(normpath(p)) for p in sys.path]
install_lib = normcase(normpath(self.install_lib))
if (self.warn_dir and
not (self.path_file and self.install_path_file) and
install_lib not in sys_path):
logger.debug(("modules installed to '%s', which is not in "
"Python's module search path (sys.path) -- "
"you'll have to change the search path yourself"),
self.install_lib)
def create_path_file(self):
"""Creates the .pth file"""
filename = os.path.join(self.install_libbase,
self.path_file + ".pth")
if self.install_path_file:
self.execute(write_file,
(filename, [self.extra_dirs]),
"creating %s" % filename)
else:
logger.warning('%s: path file %r not created',
self.get_command_name(), filename)
# -- Reporting methods ---------------------------------------------
def get_outputs(self):
"""Assembles the outputs of all the sub-commands."""
outputs = []
for cmd_name in self.get_sub_commands():
cmd = self.get_finalized_command(cmd_name)
# Add the contents of cmd.get_outputs(), ensuring
# that outputs doesn't contain duplicate entries
for filename in cmd.get_outputs():
if filename not in outputs:
outputs.append(filename)
if self.path_file and self.install_path_file:
outputs.append(os.path.join(self.install_libbase,
self.path_file + ".pth"))
return outputs
def get_inputs(self):
"""Returns the inputs of all the sub-commands"""
# XXX gee, this looks familiar ;-(
inputs = []
for cmd_name in self.get_sub_commands():
cmd = self.get_finalized_command(cmd_name)
inputs.extend(cmd.get_inputs())
return inputs
# -- Predicates for sub-command list -------------------------------
def has_lib(self):
"""Returns true if the current distribution has any Python
modules to install."""
return (self.distribution.has_pure_modules() or
self.distribution.has_ext_modules())
def has_headers(self):
"""Returns true if the current distribution has any headers to
install."""
return self.distribution.has_headers()
def has_scripts(self):
"""Returns true if the current distribution has any scripts to.
install."""
return self.distribution.has_scripts()
def has_data(self):
"""Returns true if the current distribution has any data to.
install."""
return self.distribution.has_data_files()
# 'sub_commands': a list of commands this command might have to run to
# get its work done. See cmd.py for more info.
sub_commands = [('install_lib', has_lib),
('install_headers', has_headers),
('install_scripts', has_scripts),
('install_data', has_data),
# keep install_distinfo last, as it needs the record
# with files to be completely generated
('install_distinfo', lambda self: not self.no_distinfo),
]

View file

@ -0,0 +1,175 @@
"""Create the PEP 376-compliant .dist-info directory."""
# Forked from the former install_egg_info command by Josip Djolonga
import csv
import os
import re
import hashlib
from packaging.command.cmd import Command
from packaging import logger
from shutil import rmtree
class install_distinfo(Command):
description = 'create a .dist-info directory for the distribution'
user_options = [
('distinfo-dir=', None,
"directory where the the .dist-info directory will be installed"),
('installer=', None,
"the name of the installer"),
('requested', None,
"generate a REQUESTED file"),
('no-requested', None,
"do not generate a REQUESTED file"),
('no-record', None,
"do not generate a RECORD file"),
('no-resources', None,
"do not generate a RESSOURCES list installed file")
]
boolean_options = ['requested', 'no-record', 'no-resources']
negative_opt = {'no-requested': 'requested'}
def initialize_options(self):
self.distinfo_dir = None
self.installer = None
self.requested = None
self.no_record = None
self.no_resources = None
def finalize_options(self):
self.set_undefined_options('install_dist',
'installer', 'requested', 'no_record')
self.set_undefined_options('install_lib',
('install_dir', 'distinfo_dir'))
if self.installer is None:
# FIXME distutils or packaging?
# + document default in the option help text above and in install
self.installer = 'distutils'
if self.requested is None:
self.requested = True
if self.no_record is None:
self.no_record = False
if self.no_resources is None:
self.no_resources = False
metadata = self.distribution.metadata
basename = "%s-%s.dist-info" % (
to_filename(safe_name(metadata['Name'])),
to_filename(safe_version(metadata['Version'])))
self.distinfo_dir = os.path.join(self.distinfo_dir, basename)
self.outputs = []
def run(self):
# FIXME dry-run should be used at a finer level, so that people get
# useful logging output and can have an idea of what the command would
# have done
if not self.dry_run:
target = self.distinfo_dir
if os.path.isdir(target) and not os.path.islink(target):
rmtree(target)
elif os.path.exists(target):
self.execute(os.unlink, (self.distinfo_dir,),
"removing " + target)
self.execute(os.makedirs, (target,), "creating " + target)
metadata_path = os.path.join(self.distinfo_dir, 'METADATA')
logger.info('creating %s', metadata_path)
self.distribution.metadata.write(metadata_path)
self.outputs.append(metadata_path)
installer_path = os.path.join(self.distinfo_dir, 'INSTALLER')
logger.info('creating %s', installer_path)
with open(installer_path, 'w') as f:
f.write(self.installer)
self.outputs.append(installer_path)
if self.requested:
requested_path = os.path.join(self.distinfo_dir, 'REQUESTED')
logger.info('creating %s', requested_path)
open(requested_path, 'w').close()
self.outputs.append(requested_path)
if not self.no_resources:
install_data = self.get_finalized_command('install_data')
if install_data.get_resources_out() != []:
resources_path = os.path.join(self.distinfo_dir,
'RESOURCES')
logger.info('creating %s', resources_path)
                    # the file must be opened in text mode: csv.writer
                    # writes str rows, which would fail on a binary file
                    with open(resources_path, 'w', encoding='utf-8') as f:
                        writer = csv.writer(f, delimiter=',',
                                            lineterminator=os.linesep,
                                            quotechar='"')
                        for row in install_data.get_resources_out():
                            writer.writerow(row)
self.outputs.append(resources_path)
if not self.no_record:
record_path = os.path.join(self.distinfo_dir, 'RECORD')
logger.info('creating %s', record_path)
with open(record_path, 'w', encoding='utf-8') as f:
writer = csv.writer(f, delimiter=',',
lineterminator=os.linesep,
quotechar='"')
install = self.get_finalized_command('install_dist')
for fpath in install.get_outputs():
if fpath.endswith('.pyc') or fpath.endswith('.pyo'):
# do not put size and md5 hash, as in PEP-376
writer.writerow((fpath, '', ''))
else:
size = os.path.getsize(fpath)
                            # hash the raw bytes; outputs may include
                            # binary files (extension modules, data files)
                            with open(fpath, 'rb') as fp:
                                hash = hashlib.md5()
                                hash.update(fp.read())
md5sum = hash.hexdigest()
writer.writerow((fpath, md5sum, size))
# add the RECORD file itself
writer.writerow((record_path, '', ''))
self.outputs.append(record_path)
def get_outputs(self):
return self.outputs
# The following functions are taken from setuptools' pkg_resources module.
def safe_name(name):
"""Convert an arbitrary string to a standard distribution name
Any runs of non-alphanumeric/. characters are replaced with a single '-'.
"""
return re.sub('[^A-Za-z0-9.]+', '-', name)
def safe_version(version):
"""Convert an arbitrary string to a standard version string
Spaces become dots, and all other non-alphanumeric characters become
dashes, with runs of multiple dashes condensed to a single dash.
"""
version = version.replace(' ', '.')
return re.sub('[^A-Za-z0-9.]+', '-', version)
def to_filename(name):
"""Convert a project or version name to its filename-escaped form
Any '-' characters are currently replaced with '_'.
"""
return name.replace('-', '_')
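# Examples for the helpers above (hypothetical project names; results
# follow directly from the regular expressions):
#   safe_name('my project')    -> 'my-project'
#   safe_version('1.0 beta 2') -> '1.0.beta.2'
#   to_filename('my-project')  -> 'my_project'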

View file

@ -0,0 +1,43 @@
"""Install C/C++ header files to the Python include directory."""
from packaging.command.cmd import Command
# XXX force is never used
class install_headers(Command):
description = "install C/C++ header files"
user_options = [('install-dir=', 'd',
"directory to install header files to"),
('force', 'f',
"force installation (overwrite existing files)"),
]
boolean_options = ['force']
def initialize_options(self):
self.install_dir = None
self.force = False
self.outfiles = []
def finalize_options(self):
self.set_undefined_options('install_dist',
('install_headers', 'install_dir'),
'force')
def run(self):
headers = self.distribution.headers
if not headers:
return
self.mkpath(self.install_dir)
for header in headers:
out = self.copy_file(header, self.install_dir)[0]
self.outfiles.append(out)
def get_inputs(self):
return self.distribution.headers or []
def get_outputs(self):
return self.outfiles

View file

@ -0,0 +1,222 @@
"""Install all modules (extensions and pure Python)."""
import os
import sys
import logging
from packaging import logger
from packaging.command.cmd import Command
from packaging.errors import PackagingOptionError
# Extension for Python source files.
if hasattr(os, 'extsep'):
PYTHON_SOURCE_EXTENSION = os.extsep + "py"
else:
PYTHON_SOURCE_EXTENSION = ".py"
class install_lib(Command):
description = "install all modules (extensions and pure Python)"
# The byte-compilation options are a tad confusing. Here are the
# possible scenarios:
    #   1) no compilation at all (--no-compile --optimize=0)
    #   2) compile .pyc only (--compile --optimize=0; default)
    #   3) compile .pyc and "level 1" .pyo (--compile --optimize=1)
    #   4) compile "level 1" .pyo only (--no-compile --optimize=1)
    #   5) compile .pyc and "level 2" .pyo (--compile --optimize=2)
    #   6) compile "level 2" .pyo only (--no-compile --optimize=2)
    #
    # The UI for this is two options: 'compile' and 'optimize'.
    # 'compile' is strictly boolean, and only decides whether to
    # generate .pyc files.  'optimize' is three-way (0, 1, or 2), and
    # decides both whether to generate .pyo files and what level of
    # optimization to use.
user_options = [
('install-dir=', 'd', "directory to install to"),
('build-dir=','b', "build directory (where to install from)"),
('force', 'f', "force installation (overwrite existing files)"),
('compile', 'c', "compile .py to .pyc [default]"),
('no-compile', None, "don't compile .py files"),
('optimize=', 'O',
"also compile with optimization: -O1 for \"python -O\", "
"-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
('skip-build', None, "skip the build steps"),
]
boolean_options = ['force', 'compile', 'skip-build']
negative_opt = {'no-compile' : 'compile'}
def initialize_options(self):
# let the 'install_dist' command dictate our installation directory
self.install_dir = None
self.build_dir = None
self.force = False
self.compile = None
self.optimize = None
self.skip_build = None
def finalize_options(self):
# Get all the information we need to install pure Python modules
# from the umbrella 'install_dist' command -- build (source) directory,
# install (target) directory, and whether to compile .py files.
self.set_undefined_options('install_dist',
('build_lib', 'build_dir'),
('install_lib', 'install_dir'),
'force', 'compile', 'optimize', 'skip_build')
if self.compile is None:
self.compile = True
if self.optimize is None:
self.optimize = 0
if not isinstance(self.optimize, int):
try:
self.optimize = int(self.optimize)
if self.optimize not in (0, 1, 2):
raise AssertionError
except (ValueError, AssertionError):
raise PackagingOptionError("optimize must be 0, 1, or 2")
def run(self):
# Make sure we have built everything we need first
self.build()
# Install everything: simply dump the entire contents of the build
# directory to the installation directory (that's the beauty of
# having a build directory!)
outfiles = self.install()
# (Optionally) compile .py to .pyc
if outfiles is not None and self.distribution.has_pure_modules():
self.byte_compile(outfiles)
# -- Top-level worker functions ------------------------------------
# (called from 'run()')
def build(self):
if not self.skip_build:
if self.distribution.has_pure_modules():
self.run_command('build_py')
if self.distribution.has_ext_modules():
self.run_command('build_ext')
def install(self):
if os.path.isdir(self.build_dir):
outfiles = self.copy_tree(self.build_dir, self.install_dir)
else:
logger.warning(
'%s: %r does not exist -- no Python modules to install',
self.get_command_name(), self.build_dir)
return
return outfiles
def byte_compile(self, files):
        if sys.dont_write_bytecode:
            # XXX do we want this?  The fact that this Python was started
            # with bytecode writing disabled doesn't necessarily mean that
            # the built *dists should not contain bytecode -- or does it?
            logger.warning('%s: byte-compiling is disabled, skipping.',
                           self.get_command_name())
return
from packaging.util import byte_compile
# Get the "--root" directory supplied to the "install_dist" command,
# and use it as a prefix to strip off the purported filename
# encoded in bytecode files. This is far from complete, but it
# should at least generate usable bytecode in RPM distributions.
install_root = self.get_finalized_command('install_dist').root
        # Temporary kludge until we remove the verbose arguments and use
        # logging everywhere: be verbose only when the logger is at DEBUG
        # level or lower (lower numeric levels are more verbose)
        verbose = logger.getEffectiveLevel() <= logging.DEBUG
if self.compile:
byte_compile(files, optimize=0,
force=self.force, prefix=install_root,
dry_run=self.dry_run)
if self.optimize > 0:
byte_compile(files, optimize=self.optimize,
force=self.force, prefix=install_root,
verbose=verbose,
dry_run=self.dry_run)
# -- Utility methods -----------------------------------------------
def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir):
if not has_any:
return []
build_cmd = self.get_finalized_command(build_cmd)
build_files = build_cmd.get_outputs()
build_dir = getattr(build_cmd, cmd_option)
prefix_len = len(build_dir) + len(os.sep)
outputs = []
for file in build_files:
outputs.append(os.path.join(output_dir, file[prefix_len:]))
return outputs
def _bytecode_filenames(self, py_filenames):
bytecode_files = []
for py_file in py_filenames:
# Since build_py handles package data installation, the
# list of outputs can contain more than just .py files.
# Make sure we only report bytecode for the .py files.
ext = os.path.splitext(os.path.normcase(py_file))[1]
if ext != PYTHON_SOURCE_EXTENSION:
continue
if self.compile:
bytecode_files.append(py_file + "c")
if self.optimize > 0:
bytecode_files.append(py_file + "o")
return bytecode_files
# -- External interface --------------------------------------------
# (called by outsiders)
def get_outputs(self):
"""Return the list of files that would be installed if this command
were actually run. Not affected by the "dry-run" flag or whether
modules have actually been built yet.
"""
pure_outputs = \
self._mutate_outputs(self.distribution.has_pure_modules(),
'build_py', 'build_lib',
self.install_dir)
if self.compile:
bytecode_outputs = self._bytecode_filenames(pure_outputs)
else:
bytecode_outputs = []
ext_outputs = \
self._mutate_outputs(self.distribution.has_ext_modules(),
'build_ext', 'build_lib',
self.install_dir)
return pure_outputs + bytecode_outputs + ext_outputs
def get_inputs(self):
"""Get the list of files that are input to this command, ie. the
files that get installed as they are named in the build tree.
The files in this list correspond one-to-one to the output
filenames returned by 'get_outputs()'.
"""
inputs = []
if self.distribution.has_pure_modules():
build_py = self.get_finalized_command('build_py')
inputs.extend(build_py.get_outputs())
if self.distribution.has_ext_modules():
build_ext = self.get_finalized_command('build_ext')
inputs.extend(build_ext.get_outputs())
return inputs

View file

@ -0,0 +1,59 @@
"""Install scripts."""
# Contributed by Bastian Kleineidam
import os
from packaging.command.cmd import Command
from packaging import logger
class install_scripts(Command):
description = "install scripts (Python or otherwise)"
user_options = [
('install-dir=', 'd', "directory to install scripts to"),
('build-dir=','b', "build directory (where to install from)"),
('force', 'f', "force installation (overwrite existing files)"),
('skip-build', None, "skip the build steps"),
]
boolean_options = ['force', 'skip-build']
def initialize_options(self):
self.install_dir = None
self.force = False
self.build_dir = None
self.skip_build = None
def finalize_options(self):
self.set_undefined_options('build', ('build_scripts', 'build_dir'))
self.set_undefined_options('install_dist',
('install_scripts', 'install_dir'),
'force', 'skip_build')
def run(self):
if not self.skip_build:
self.run_command('build_scripts')
if not os.path.exists(self.build_dir):
self.outfiles = []
return
self.outfiles = self.copy_tree(self.build_dir, self.install_dir)
if os.name == 'posix':
# Set the executable bits (owner, group, and world) on
# all the scripts we just installed.
for file in self.get_outputs():
if self.dry_run:
logger.info("changing mode of %s", file)
else:
mode = (os.stat(file).st_mode | 0o555) & 0o7777
logger.info("changing mode of %s to %o", file, mode)
os.chmod(file, mode)
def get_inputs(self):
return self.distribution.scripts or []
def get_outputs(self):
return self.outfiles or []

View file

@ -0,0 +1,282 @@
"""Register a release with a project index."""
# Contributed by Richard Jones
import io
import getpass
import urllib.error
import urllib.parse
import urllib.request
from packaging import logger
from packaging.util import (read_pypirc, generate_pypirc, DEFAULT_REPOSITORY,
DEFAULT_REALM, get_pypirc_path)
from packaging.command.cmd import Command
class register(Command):
description = "register a release with PyPI"
user_options = [
('repository=', 'r',
"repository URL [default: %s]" % DEFAULT_REPOSITORY),
('show-response', None,
"display full response text from server"),
('list-classifiers', None,
"list valid Trove classifiers"),
        ('strict', None,
"stop the registration if the metadata is not fully compliant")
]
boolean_options = ['show-response', 'list-classifiers', 'strict']
def initialize_options(self):
self.repository = None
self.realm = None
self.show_response = False
self.list_classifiers = False
self.strict = False
def finalize_options(self):
if self.repository is None:
self.repository = DEFAULT_REPOSITORY
if self.realm is None:
self.realm = DEFAULT_REALM
def run(self):
self._set_config()
# Check the package metadata
check = self.distribution.get_command_obj('check')
if check.strict != self.strict and not check.all:
# If check was already run but with different options,
# re-run it
check.strict = self.strict
check.all = True
self.distribution.have_run.pop('check', None)
self.run_command('check')
if self.dry_run:
self.verify_metadata()
elif self.list_classifiers:
self.classifiers()
else:
self.send_metadata()
def _set_config(self):
        ''' Read the configuration file and set attributes.
'''
config = read_pypirc(self.repository, self.realm)
if config != {}:
self.username = config['username']
self.password = config['password']
self.repository = config['repository']
self.realm = config['realm']
self.has_config = True
else:
if self.repository not in ('pypi', DEFAULT_REPOSITORY):
raise ValueError('%s not found in .pypirc' % self.repository)
if self.repository == 'pypi':
self.repository = DEFAULT_REPOSITORY
self.has_config = False
def classifiers(self):
''' Fetch the list of classifiers from the server.
'''
response = urllib.request.urlopen(self.repository+'?:action=list_classifiers')
logger.info(response.read())
def verify_metadata(self):
''' Send the metadata to the package index server to be checked.
'''
# send the info to the server and report the result
code, result = self.post_to_server(self.build_post_data('verify'))
logger.info('server response (%s): %s', code, result)
def send_metadata(self):
''' Send the metadata to the package index server.
Well, do the following:
1. figure who the user is, and then
2. send the data as a Basic auth'ed POST.
First we try to read the username/password from $HOME/.pypirc,
which is a ConfigParser-formatted file with a section
[distutils] containing username and password entries (both
in clear text). Eg:
[distutils]
index-servers =
pypi
[pypi]
username: fred
password: sekrit
Otherwise, to figure who the user is, we offer the user three
choices:
1. use existing login,
2. register as a new user, or
3. set the password to a random string and email the user.
'''
# TODO factor registration out into another method
# TODO use print to print, not logging
# see if we can short-cut and get the username/password from the
# config
if self.has_config:
choice = '1'
username = self.username
password = self.password
else:
choice = 'x'
username = password = ''
# get the user's login info
choices = '1 2 3 4'.split()
while choice not in choices:
logger.info('''\
We need to know who you are, so please choose either:
1. use your existing login,
2. register as a new user,
3. have the server generate a new password for you (and email it to you), or
4. quit
Your selection [default 1]: ''')
choice = input()
if not choice:
choice = '1'
elif choice not in choices:
print('Please choose one of the four options!')
if choice == '1':
# get the username and password
while not username:
username = input('Username: ')
while not password:
password = getpass.getpass('Password: ')
# set up the authentication
auth = urllib.request.HTTPPasswordMgr()
host = urllib.parse.urlparse(self.repository)[1]
auth.add_password(self.realm, host, username, password)
# send the info to the server and report the result
code, result = self.post_to_server(self.build_post_data('submit'),
auth)
logger.info('Server response (%s): %s', code, result)
# possibly save the login
if code == 200:
if self.has_config:
# sharing the password in the distribution instance
# so the upload command can reuse it
self.distribution.password = password
else:
logger.info(
'I can store your PyPI login so future submissions '
'will be faster.\n(the login will be stored in %s)',
get_pypirc_path())
choice = 'X'
while choice.lower() not in 'yn':
choice = input('Save your login (y/N)?')
if not choice:
choice = 'n'
if choice.lower() == 'y':
generate_pypirc(username, password)
elif choice == '2':
data = {':action': 'user'}
data['name'] = data['password'] = data['email'] = ''
data['confirm'] = None
while not data['name']:
data['name'] = input('Username: ')
while data['password'] != data['confirm']:
while not data['password']:
data['password'] = getpass.getpass('Password: ')
while not data['confirm']:
data['confirm'] = getpass.getpass(' Confirm: ')
if data['password'] != data['confirm']:
data['password'] = ''
data['confirm'] = None
print("Password and confirm don't match!")
while not data['email']:
data['email'] = input(' EMail: ')
code, result = self.post_to_server(data)
if code != 200:
logger.info('server response (%s): %s', code, result)
else:
logger.info('you will receive an email shortly; follow the '
'instructions in it to complete registration.')
elif choice == '3':
data = {':action': 'password_reset'}
data['email'] = ''
while not data['email']:
data['email'] = input('Your email address: ')
code, result = self.post_to_server(data)
logger.info('server response (%s): %s', code, result)
def build_post_data(self, action):
# figure the data to send - the metadata plus some additional
# information used by the package server
data = self.distribution.metadata.todict()
data[':action'] = action
return data
# XXX to be refactored with upload.upload_file
def post_to_server(self, data, auth=None):
''' Post a query to the server, and return a string response.
'''
if 'name' in data:
logger.info('Registering %s to %s', data['name'], self.repository)
# Build up the MIME payload for the urllib2 POST data
boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
sep_boundary = '\n--' + boundary
end_boundary = sep_boundary + '--'
body = io.StringIO()
for key, value in data.items():
# handle multiple entries for the same name
if not isinstance(value, (tuple, list)):
value = [value]
            for item in value:
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'
                           % key)
                body.write("\n\n")
                body.write(item)
                if item and item[-1] == '\r':
                    # write an extra newline (lurve Macs)
                    body.write('\n')
body.write(end_boundary)
body.write("\n")
body = body.getvalue()
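        # For illustration only (field values made up): for data such as
        # {':action': 'submit', 'name': 'Foo'} the body built above looks
        # roughly like
        #
        #   --<boundary>
        #   Content-Disposition: form-data; name=":action"
        #
        #   submit
        #   --<boundary>
        #   Content-Disposition: form-data; name="name"
        #
        #   Foo
        #   --<boundary>--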
# build the Request
headers = {
            'Content-type':
                'multipart/form-data; boundary=%s; charset=utf-8' % boundary,
'Content-length': str(len(body))
}
req = urllib.request.Request(self.repository, body, headers)
# handle HTTP and include the Basic Auth handler
opener = urllib.request.build_opener(
urllib.request.HTTPBasicAuthHandler(password_mgr=auth)
)
data = ''
try:
result = opener.open(req)
except urllib.error.HTTPError as e:
if self.show_response:
data = e.fp.read()
result = e.code, e.msg
except urllib.error.URLError as e:
result = 500, str(e)
else:
if self.show_response:
data = result.read()
result = 200, 'OK'
if self.show_response:
dashes = '-' * 75
logger.info('%s%s%s', dashes, data, dashes)
return result

View file

@ -0,0 +1,375 @@
"""Create a source distribution."""
import os
import sys
import re
from io import StringIO
from glob import glob
from shutil import get_archive_formats, rmtree
from packaging import logger
from packaging.util import resolve_name
from packaging.errors import (PackagingPlatformError, PackagingOptionError,
PackagingModuleError, PackagingFileError)
from packaging.command import get_command_names
from packaging.command.cmd import Command
from packaging.manifest import Manifest
def show_formats():
"""Print all possible values for the 'formats' option (used by
the "--help-formats" command-line option).
"""
from packaging.fancy_getopt import FancyGetopt
formats = sorted(('formats=' + name, None, desc)
for name, desc in get_archive_formats())
FancyGetopt(formats).print_help(
"List of available source distribution formats:")
# a backslash followed by optional spaces and an end of line
_COLLAPSE_PATTERN = re.compile(r'\\[ \t]*\n', re.M)
# a comment line or a line containing only whitespace
_COMMENTED_LINE = re.compile(r'^#.*\n$|^[ \t]*\n$', re.M)
class sdist(Command):
description = "create a source distribution (tarball, zip file, etc.)"
user_options = [
('manifest=', 'm',
"name of manifest file [default: MANIFEST]"),
('use-defaults', None,
"include the default file set in the manifest "
"[default; disable with --no-defaults]"),
('no-defaults', None,
"don't include the default file set"),
('prune', None,
"specifically exclude files/directories that should not be "
"distributed (build tree, RCS/CVS dirs, etc.) "
"[default; disable with --no-prune]"),
('no-prune', None,
"don't automatically exclude anything"),
        ('manifest-only', 'o',
         "just regenerate the manifest and then stop"),
('formats=', None,
"formats for source distribution (comma-separated list)"),
('keep-temp', 'k',
"keep the distribution tree around after creating " +
"archive file(s)"),
('dist-dir=', 'd',
"directory to put the source distribution archive(s) in "
"[default: dist]"),
        ('check-metadata', None,
         "ensure that all required metadata elements are supplied; "
         "warn if any are missing [default]"),
('owner=', 'u',
"Owner name used when creating a tar file [default: current user]"),
('group=', 'g',
"Group name used when creating a tar file [default: current group]"),
('manifest-builders=', None,
"manifest builders (comma-separated list)"),
]
boolean_options = ['use-defaults', 'prune',
'manifest-only', 'keep-temp', 'check-metadata']
help_options = [
('help-formats', None,
"list available distribution formats", show_formats),
]
negative_opt = {'no-defaults': 'use-defaults',
'no-prune': 'prune'}
default_format = {'posix': 'gztar',
'nt': 'zip'}
def initialize_options(self):
self.manifest = None
# 'use_defaults': if true, we will include the default file set
# in the manifest
self.use_defaults = True
self.prune = True
self.manifest_only = False
self.formats = None
self.keep_temp = False
self.dist_dir = None
self.archive_files = None
self.metadata_check = True
self.owner = None
self.group = None
self.filelist = None
self.manifest_builders = None
def _check_archive_formats(self, formats):
supported_formats = [name for name, desc in get_archive_formats()]
for format in formats:
if format not in supported_formats:
return format
return None
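
    # For example, with the stock shutil formats, _check_archive_formats(
    # ['gztar', 'zip']) returns None, while ['gztar', 'rar'] makes it
    # return 'rar', the first unsupported name (format names illustrative).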
def finalize_options(self):
if self.manifest is None:
self.manifest = "MANIFEST"
self.ensure_string_list('formats')
if self.formats is None:
try:
self.formats = [self.default_format[os.name]]
except KeyError:
raise PackagingPlatformError("don't know how to create source "
"distributions on platform %s" % os.name)
bad_format = self._check_archive_formats(self.formats)
if bad_format:
            raise PackagingOptionError(
                "unknown archive format '%s'" % bad_format)
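        # For example, a value such as "gztar,zip" passed for --formats is
        # split into ['gztar', 'zip'] by ensure_string_list above before
        # being checked against the formats shutil knows about.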
if self.dist_dir is None:
self.dist_dir = "dist"
if self.filelist is None:
self.filelist = Manifest()
if self.manifest_builders is None:
self.manifest_builders = []
else:
if isinstance(self.manifest_builders, str):
self.manifest_builders = self.manifest_builders.split(',')
builders = []
for builder in self.manifest_builders:
builder = builder.strip()
if builder == '':
continue
try:
builder = resolve_name(builder)
except ImportError as e:
raise PackagingModuleError(e)
builders.append(builder)
self.manifest_builders = builders
def run(self):
# 'filelist' contains the list of files that will make up the
# manifest
self.filelist.clear()
# Check the package metadata
if self.metadata_check:
self.run_command('check')
# Do whatever it takes to get the list of files to process
# (process the manifest template, read an existing manifest,
# whatever). File list is accumulated in 'self.filelist'.
self.get_file_list()
# If user just wanted us to regenerate the manifest, stop now.
if self.manifest_only:
return
# Otherwise, go ahead and create the source distribution tarball,
# or zipfile, or whatever.
self.make_distribution()
def get_file_list(self):
"""Figure out the list of files to include in the source
distribution, and put it in 'self.filelist'. This might involve
reading the manifest template (and writing the manifest), or just
reading the manifest, or just using the default file set -- it all
depends on the user's options.
"""
template_exists = len(self.distribution.extra_files) > 0
if not template_exists:
logger.warning('%s: using default file list',
self.get_command_name())
self.filelist.findall()
if self.use_defaults:
self.add_defaults()
if template_exists:
template = '\n'.join(self.distribution.extra_files)
self.filelist.read_template(StringIO(template))
# call manifest builders, if any.
for builder in self.manifest_builders:
builder(self.distribution, self.filelist)
if self.prune:
self.prune_file_list()
self.filelist.write(self.manifest)
def add_defaults(self):
"""Add all the default files to self.filelist:
- README or README.txt
- test/test*.py
- all pure Python modules mentioned in setup script
- all files pointed by package_data (build_py)
- all files defined in data_files.
- all files defined as scripts.
- all C sources listed as part of extensions or C libraries
in the setup script (doesn't catch C headers!)
Warns if (README or README.txt) or setup.py are missing; everything
else is optional.
"""
standards = [('README', 'README.txt')]
for fn in standards:
if isinstance(fn, tuple):
alts = fn
got_it = False
                for alt in alts:
                    if os.path.exists(alt):
                        got_it = True
                        self.filelist.append(alt)
                        break
if not got_it:
logger.warning(
'%s: standard file not found: should have one of %s',
self.get_command_name(), ', '.join(alts))
else:
if os.path.exists(fn):
self.filelist.append(fn)
else:
logger.warning('%s: standard file %r not found',
self.get_command_name(), fn)
optional = ['test/test*.py', 'setup.cfg']
for pattern in optional:
files = [f for f in glob(pattern) if os.path.isfile(f)]
if files:
self.filelist.extend(files)
for cmd_name in get_command_names():
try:
cmd_obj = self.get_finalized_command(cmd_name)
except PackagingOptionError:
pass
else:
self.filelist.extend(cmd_obj.get_source_files())
def prune_file_list(self):
"""Prune off branches that might slip into the file list as created
by 'read_template()', but really don't belong there:
* the build tree (typically "build")
* the release tree itself (only an issue if we ran "sdist"
previously with --keep-temp, or it aborted)
* any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories
"""
build = self.get_finalized_command('build')
base_dir = self.distribution.get_fullname()
self.filelist.exclude_pattern(None, prefix=build.build_base)
self.filelist.exclude_pattern(None, prefix=base_dir)
# pruning out vcs directories
# both separators are used under win32
if sys.platform == 'win32':
seps = r'/|\\'
else:
seps = '/'
vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr',
'_darcs']
vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps)
self.filelist.exclude_pattern(vcs_ptrn, is_regex=True)
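        # for example, on POSIX the pattern built above expands to
        #   (^|/)(RCS|CVS|\.svn|\.hg|\.git|\.bzr|_darcs)(/).*
        # so any path containing a VCS directory component is excluded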
def make_release_tree(self, base_dir, files):
"""Create the directory tree that will become the source
distribution archive. All directories implied by the filenames in
'files' are created under 'base_dir', and then we hard link or copy
(if hard linking is unavailable) those files into place.
Essentially, this duplicates the developer's source tree, but in a
directory named after the distribution, containing only the files
to be distributed.
"""
# Create all the directories under 'base_dir' necessary to
# put 'files' there; the 'mkpath()' is just so we don't die
# if the manifest happens to be empty.
self.mkpath(base_dir)
self.create_tree(base_dir, files, dry_run=self.dry_run)
# And walk over the list of files, either making a hard link (if
# os.link exists) to each one that doesn't already exist in its
# corresponding location under 'base_dir', or copying each file
# that's out-of-date in 'base_dir'. (Usually, all files will be
# out-of-date, because by default we blow away 'base_dir' when
# we're done making the distribution archives.)
if hasattr(os, 'link'): # can make hard links on this system
link = 'hard'
msg = "making hard links in %s..." % base_dir
else: # nope, have to copy
link = None
msg = "copying files to %s..." % base_dir
if not files:
logger.warning("no files to distribute -- empty manifest?")
else:
logger.info(msg)
for file in self.distribution.metadata.requires_files:
if file not in files:
msg = "'%s' must be included explicitly in 'extra_files'" \
% file
raise PackagingFileError(msg)
for file in files:
if not os.path.isfile(file):
logger.warning("'%s' not a regular file -- skipping", file)
else:
dest = os.path.join(base_dir, file)
self.copy_file(file, dest, link=link)
self.distribution.metadata.write(os.path.join(base_dir, 'PKG-INFO'))
def make_distribution(self):
"""Create the source distribution(s). First, we create the release
tree with 'make_release_tree()'; then, we create all required
archive files (according to 'self.formats') from the release tree.
Finally, we clean up by blowing away the release tree (unless
'self.keep_temp' is true). The list of archive files created is
stored so it can be retrieved later by 'get_archive_files()'.
"""
# Don't warn about missing metadata here -- should be (and is!)
# done elsewhere.
base_dir = self.distribution.get_fullname()
base_name = os.path.join(self.dist_dir, base_dir)
self.make_release_tree(base_dir, self.filelist.files)
archive_files = [] # remember names of files we create
        # the plain tar archive must be created last, to avoid it being
        # overwritten or removed while the other formats are built
if 'tar' in self.formats:
self.formats.append(self.formats.pop(self.formats.index('tar')))
for fmt in self.formats:
file = self.make_archive(base_name, fmt, base_dir=base_dir,
owner=self.owner, group=self.group)
archive_files.append(file)
self.distribution.dist_files.append(('sdist', '', file))
self.archive_files = archive_files
if not self.keep_temp:
if self.dry_run:
logger.info('removing %s', base_dir)
else:
rmtree(base_dir)
def get_archive_files(self):
"""Return the list of archive files created when the command
was run, or None if the command hasn't run yet.
"""
return self.archive_files
def create_tree(self, base_dir, files, mode=0o777, verbose=1,
dry_run=False):
need_dir = set()
for file in files:
need_dir.add(os.path.join(base_dir, os.path.dirname(file)))
# Now create them
for dir in sorted(need_dir):
self.mkpath(dir, mode, verbose=verbose, dry_run=dry_run)
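
    # Illustrative example (file names made up): for files such as
    # ['README', 'pkg/module.py'], create_tree('foo-1.0', files) creates
    # 'foo-1.0' and 'foo-1.0/pkg', which is all make_release_tree needs
    # before hard-linking or copying the files into place.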

View file

@ -0,0 +1,81 @@
"""Run the project's test suite."""
import os
import sys
import logging
import unittest
from packaging import logger
from packaging.command.cmd import Command
from packaging.database import get_distribution
from packaging.errors import PackagingOptionError
from packaging.util import resolve_name
class test(Command):
description = "run the project's test suite"
user_options = [
('suite=', 's',
"test suite to run (for example: 'some_module.test_suite')"),
('runner=', None,
"test runner to be called."),
('tests-require=', None,
"list of distributions required to run the test suite."),
]
def initialize_options(self):
self.suite = None
self.runner = None
self.tests_require = []
def finalize_options(self):
self.build_lib = self.get_finalized_command("build").build_lib
for requirement in self.tests_require:
if get_distribution(requirement) is None:
logger.warning("test dependency %s is not installed, "
"tests may fail", requirement)
if (not self.suite and not self.runner and
self.get_ut_with_discovery() is None):
raise PackagingOptionError(
"no test discovery available, please give a 'suite' or "
"'runner' option or install unittest2")
def get_ut_with_discovery(self):
if hasattr(unittest.TestLoader, "discover"):
return unittest
else:
try:
import unittest2
return unittest2
except ImportError:
return None
def run(self):
prev_syspath = sys.path[:]
try:
# build release
build = self.get_reinitialized_command('build')
self.run_command('build')
sys.path.insert(0, build.build_lib)
# Temporary kludge until we remove the verbose arguments and use
# logging everywhere
logger = logging.getLogger('packaging')
            verbose = logger.getEffectiveLevel() <= logging.DEBUG
verbosity = verbose + 1
# run the tests
if self.runner:
resolve_name(self.runner)()
elif self.suite:
runner = unittest.TextTestRunner(verbosity=verbosity)
runner.run(resolve_name(self.suite)())
elif self.get_ut_with_discovery():
ut = self.get_ut_with_discovery()
test_suite = ut.TestLoader().discover(os.curdir)
runner = ut.TextTestRunner(verbosity=verbosity)
runner.run(test_suite)
finally:
sys.path[:] = prev_syspath

View file

@ -0,0 +1,201 @@
"""Upload a distribution to a project index."""
import os
import socket
import logging
import platform
import urllib.parse
from io import BytesIO
from base64 import standard_b64encode
from hashlib import md5
from urllib.error import HTTPError
from urllib.request import urlopen, Request
from packaging import logger
from packaging.errors import PackagingOptionError
from packaging.util import (spawn, read_pypirc, DEFAULT_REPOSITORY,
DEFAULT_REALM)
from packaging.command.cmd import Command
class upload(Command):
description = "upload distribution to PyPI"
user_options = [
('repository=', 'r',
"repository URL [default: %s]" % DEFAULT_REPOSITORY),
('show-response', None,
"display full response text from server"),
('sign', 's',
"sign files to upload using gpg"),
('identity=', 'i',
"GPG identity used to sign files"),
('upload-docs', None,
"upload documentation too"),
]
boolean_options = ['show-response', 'sign']
def initialize_options(self):
self.repository = None
self.realm = None
self.show_response = False
self.username = ''
self.password = ''
self.sign = False
self.identity = None
self.upload_docs = False
def finalize_options(self):
if self.repository is None:
self.repository = DEFAULT_REPOSITORY
if self.realm is None:
self.realm = DEFAULT_REALM
if self.identity and not self.sign:
raise PackagingOptionError(
"Must use --sign for --identity to have meaning")
config = read_pypirc(self.repository, self.realm)
if config != {}:
self.username = config['username']
self.password = config['password']
self.repository = config['repository']
self.realm = config['realm']
# getting the password from the distribution
# if previously set by the register command
if not self.password and self.distribution.password:
self.password = self.distribution.password
def run(self):
if not self.distribution.dist_files:
raise PackagingOptionError(
"No dist file created in earlier command")
for command, pyversion, filename in self.distribution.dist_files:
self.upload_file(command, pyversion, filename)
if self.upload_docs:
upload_docs = self.get_finalized_command("upload_docs")
upload_docs.repository = self.repository
upload_docs.username = self.username
upload_docs.password = self.password
upload_docs.run()
# XXX to be refactored with register.post_to_server
def upload_file(self, command, pyversion, filename):
# Makes sure the repository URL is compliant
scheme, netloc, url, params, query, fragments = \
urllib.parse.urlparse(self.repository)
if params or query or fragments:
raise AssertionError("Incompatible url %s" % self.repository)
if scheme not in ('http', 'https'):
raise AssertionError("unsupported scheme " + scheme)
# Sign if requested
if self.sign:
gpg_args = ["gpg", "--detach-sign", "-a", filename]
if self.identity:
gpg_args[2:2] = ["--local-user", self.identity]
spawn(gpg_args,
dry_run=self.dry_run)
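            # for an identity such as 'alice' (illustrative) this runs
            # roughly: gpg --detach-sign --local-user alice -a <filename>
            # and produces <filename>.asc, which is attached below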
# Fill in the data - send all the metadata in case we need to
# register a new release
with open(filename, 'rb') as f:
content = f.read()
data = self.distribution.metadata.todict()
# extra upload infos
data[':action'] = 'file_upload'
        data['protocol_version'] = '1'
data['content'] = (os.path.basename(filename), content)
data['filetype'] = command
data['pyversion'] = pyversion
data['md5_digest'] = md5(content).hexdigest()
if command == 'bdist_dumb':
data['comment'] = 'built for %s' % platform.platform(terse=True)
        if self.sign:
            # the signature is sent like 'content': a (filename, bytes) pair
            with open(filename + '.asc', 'rb') as fp:
                sig = fp.read()
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     sig)
# set up the authentication
# The exact encoding of the authentication string is debated.
        # Anyway, PyPI only accepts ASCII for both username and password.
user_pass = (self.username + ":" + self.password).encode('ascii')
auth = b"Basic " + standard_b64encode(user_pass)
# Build up the MIME payload for the POST data
boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
sep_boundary = b'\n--' + boundary
end_boundary = sep_boundary + b'--'
body = BytesIO()
file_fields = ('content', 'gpg_signature')
for key, value in data.items():
# handle multiple entries for the same name
if not isinstance(value, tuple):
value = [value]
content_dispo = '\nContent-Disposition: form-data; name="%s"' % key
if key in file_fields:
filename_, content = value
filename_ = ';filename="%s"' % filename_
body.write(sep_boundary)
body.write(content_dispo.encode('utf-8'))
body.write(filename_.encode('utf-8'))
body.write(b"\n\n")
body.write(content)
else:
                for item in value:
                    item = str(item).encode('utf-8')
                    body.write(sep_boundary)
                    body.write(content_dispo.encode('utf-8'))
                    body.write(b"\n\n")
                    body.write(item)
                    if item and item.endswith(b'\r'):
# write an extra newline (lurve Macs)
body.write(b'\n')
body.write(end_boundary)
body.write(b"\n")
body = body.getvalue()
logger.info("Submitting %s to %s", filename, self.repository)
# build the Request
headers = {'Content-type':
'multipart/form-data; boundary=%s' %
boundary.decode('ascii'),
'Content-length': str(len(body)),
'Authorization': auth}
request = Request(self.repository, data=body,
headers=headers)
# send the data
try:
result = urlopen(request)
status = result.code
reason = result.msg
except socket.error as e:
logger.error(e)
return
except HTTPError as e:
status = e.code
reason = e.msg
if status == 200:
logger.info('Server response (%s): %s', status, reason)
else:
logger.error('Upload failed (%s): %s', status, reason)
if self.show_response and logger.isEnabledFor(logging.INFO):
sep = '-' * 75
logger.info('%s\n%s\n%s', sep, result.read().decode(), sep)

View file

@ -0,0 +1,173 @@
"""Upload HTML documentation to a project index."""
import os
import base64
import socket
import zipfile
import logging
import http.client
import urllib.parse
from io import BytesIO
from packaging import logger
from packaging.util import read_pypirc, DEFAULT_REPOSITORY, DEFAULT_REALM
from packaging.errors import PackagingFileError
from packaging.command.cmd import Command
def zip_dir(directory):
    """Compress the contents of *directory* recursively into a BytesIO
    object and return it."""
destination = BytesIO()
zip_file = zipfile.ZipFile(destination, "w")
for root, dirs, files in os.walk(directory):
for name in files:
full = os.path.join(root, name)
relative = root[len(directory):].lstrip(os.path.sep)
dest = os.path.join(relative, name)
zip_file.write(full, dest)
zip_file.close()
return destination
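
# A minimal usage sketch (the path is made up): the archive bytes can be
# pulled out of the returned BytesIO with getvalue(), e.g.
#   archive = zip_dir('build/docs')
#   payload = archive.getvalue()   # bytes of a zip of build/docs contents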
# grabbed from
# http://code.activestate.com/recipes/
# 146306-http-client-to-post-using-multipartform-data/
# TODO factor this out for use by install and command/upload
def encode_multipart(fields, files, boundary=None):
"""
*fields* is a sequence of (name: str, value: str) elements for regular
form fields, *files* is a sequence of (name: str, filename: str, value:
bytes) elements for data to be uploaded as files.
    Returns (content_type: bytes, body: bytes) ready to be sent with
    http.client.HTTPConnection.
"""
if boundary is None:
boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
elif not isinstance(boundary, bytes):
raise TypeError('boundary is not bytes but %r' % type(boundary))
l = []
for key, value in fields:
l.extend((
b'--' + boundary,
('Content-Disposition: form-data; name="%s"' %
key).encode('utf-8'),
b'',
value.encode('utf-8')))
for key, filename, value in files:
l.extend((
b'--' + boundary,
('Content-Disposition: form-data; name="%s"; filename="%s"' %
(key, filename)).encode('utf-8'),
b'',
value))
l.append(b'--' + boundary + b'--')
l.append(b'')
body = b'\r\n'.join(l)
content_type = b'multipart/form-data; boundary=' + boundary
return content_type, body
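
# A minimal usage sketch (all values made up): regular fields go in the
# first sequence, file uploads in the second; the result is ready to be
# sent with http.client as done in upload_docs.run() below.
#   content_type, body = encode_multipart(
#       [('name', 'Foo'), ('version', '1.0')],
#       [('content', 'foo-docs.zip', b'zip file bytes')])
#   # content_type == b'multipart/form-data; boundary=...'
#   # body holds the complete multipart payload as bytes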
class upload_docs(Command):
description = "upload HTML documentation to PyPI"
user_options = [
('repository=', 'r',
"repository URL [default: %s]" % DEFAULT_REPOSITORY),
('show-response', None,
"display full response text from server"),
('upload-dir=', None,
"directory to upload"),
]
def initialize_options(self):
self.repository = None
self.realm = None
self.show_response = False
self.upload_dir = None
self.username = ''
self.password = ''
def finalize_options(self):
if self.repository is None:
self.repository = DEFAULT_REPOSITORY
if self.realm is None:
self.realm = DEFAULT_REALM
if self.upload_dir is None:
build = self.get_finalized_command('build')
self.upload_dir = os.path.join(build.build_base, "docs")
if not os.path.isdir(self.upload_dir):
self.upload_dir = os.path.join(build.build_base, "doc")
logger.info('Using upload directory %s', self.upload_dir)
self.verify_upload_dir(self.upload_dir)
config = read_pypirc(self.repository, self.realm)
if config != {}:
self.username = config['username']
self.password = config['password']
self.repository = config['repository']
self.realm = config['realm']
def verify_upload_dir(self, upload_dir):
self.ensure_dirname('upload_dir')
index_location = os.path.join(upload_dir, "index.html")
if not os.path.exists(index_location):
            mesg = "No 'index.html' found in docs directory (%s)"
raise PackagingFileError(mesg % upload_dir)
def run(self):
name = self.distribution.metadata['Name']
version = self.distribution.metadata['Version']
zip_file = zip_dir(self.upload_dir)
fields = [(':action', 'doc_upload'),
('name', name), ('version', version)]
files = [('content', name, zip_file.getvalue())]
content_type, body = encode_multipart(fields, files)
credentials = self.username + ':' + self.password
auth = b"Basic " + base64.encodebytes(credentials.encode()).strip()
logger.info("Submitting documentation to %s", self.repository)
scheme, netloc, url, params, query, fragments = urllib.parse.urlparse(
self.repository)
if scheme == "http":
conn = http.client.HTTPConnection(netloc)
elif scheme == "https":
conn = http.client.HTTPSConnection(netloc)
else:
raise AssertionError("unsupported scheme %r" % scheme)
try:
conn.connect()
conn.putrequest("POST", url)
conn.putheader('Content-type', content_type)
conn.putheader('Content-length', str(len(body)))
conn.putheader('Authorization', auth)
conn.endheaders()
conn.send(body)
except socket.error as e:
logger.error(e)
return
r = conn.getresponse()
if r.status == 200:
logger.info('Server response (%s): %s', r.status, r.reason)
elif r.status == 301:
location = r.getheader('Location')
if location is None:
location = 'http://packages.python.org/%s/' % name
logger.info('Upload successful. Visit %s', location)
else:
logger.error('Upload failed (%s): %s', r.status, r.reason)
if self.show_response and logger.isEnabledFor(logging.INFO):
sep = '-' * 75
logger.info('%s\n%s\n%s', sep, r.read().decode('utf-8'), sep)

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.