Mirror of https://github.com/python/cpython.git
Standardize whitespace in function calls.

commit 071ed76732
parent 449f5568b7

4 changed files with 145 additions and 147 deletions
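The change below is purely mechanical: call sites lose the space between the callable and its opening parenthesis (the convention later codified in PEP 8), and a few docstrings are rewrapped so the closing quotes sit on their own line. As a quick illustration of the rule, here is a small sketch; none of these lines come from the commit itself, and the helper name is made up for the example:

# Illustrative sketch only -- not lines from this commit.
# Old distutils style put a space before the argument list:
#     self.initialize_options ()
#     getattr (self, "_" + attr)
# The style enforced here keeps the parenthesis against the callable:

def show_progress(msg, level=1):
    # hypothetical helper, defined only for this example
    if level > 0:
        print("progress: %s" % msg)    # call written without the extra space

show_progress("building")              # show_progress ("building") would be the old style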
@@ -59,13 +59,13 @@ class Command:
 # late import because of mutual dependence between these classes
 from distutils.dist import Distribution

-if not isinstance (dist, Distribution):
+if not isinstance(dist, Distribution):
 raise TypeError, "dist must be a Distribution instance"
 if self.__class__ is Command:
 raise RuntimeError, "Command is an abstract class"

 self.distribution = dist
-self.initialize_options ()
+self.initialize_options()

 # Per-command versions of the global flags, so that the user can
 # customize Distutils' behaviour command-by-command and let some
@@ -98,9 +98,9 @@ class Command:

 def __getattr__ (self, attr):
 if attr in ('verbose', 'dry_run'):
-myval = getattr (self, "_" + attr)
+myval = getattr(self, "_" + attr)
 if myval is None:
-return getattr (self.distribution, attr)
+return getattr(self.distribution, attr)
 else:
 return myval
 else:
@@ -109,7 +109,7 @@ class Command:

 def ensure_finalized (self):
 if not self.finalized:
-self.finalize_options ()
+self.finalize_options()
 self.finalized = 1


@@ -273,7 +273,7 @@ class Command:
 # -- Convenience methods for commands ------------------------------

 def get_command_name (self):
-if hasattr (self, 'command_name'):
+if hasattr(self, 'command_name'):
 return self.command_name
 else:
 return self.__class__.__name__
@@ -296,12 +296,12 @@ class Command:

 # Option_pairs: list of (src_option, dst_option) tuples

-src_cmd_obj = self.distribution.get_command_obj (src_cmd)
-src_cmd_obj.ensure_finalized ()
+src_cmd_obj = self.distribution.get_command_obj(src_cmd)
+src_cmd_obj.ensure_finalized()
 for (src_option, dst_option) in option_pairs:
-if getattr (self, dst_option) is None:
-setattr (self, dst_option,
-getattr (src_cmd_obj, src_option))
+if getattr(self, dst_option) is None:
+setattr(self, dst_option,
+getattr(src_cmd_obj, src_option))


 def get_finalized_command (self, command, create=1):
@@ -310,8 +310,8 @@ class Command:
 'command', call its 'ensure_finalized()' method, and return the
 finalized command object.
 """
-cmd_obj = self.distribution.get_command_obj (command, create)
-cmd_obj.ensure_finalized ()
+cmd_obj = self.distribution.get_command_obj(command, create)
+cmd_obj.ensure_finalized()
 return cmd_obj

 # XXX rename to 'get_reinitialized_command()'? (should do the
@@ -325,7 +325,7 @@ class Command:
 Distribution, which creates and finalizes the command object if
 necessary and then invokes its 'run()' method.
 """
-self.distribution.run_command (command)
+self.distribution.run_command(command)


 def get_sub_commands (self):
@@ -345,8 +345,8 @@ class Command:
 # -- External world manipulation -----------------------------------

 def warn (self, msg):
-sys.stderr.write ("warning: %s: %s\n" %
+sys.stderr.write("warning: %s: %s\n" %
 (self.get_command_name(), msg))


 def execute (self, func, args, msg=None, level=1):
@@ -389,17 +389,17 @@ class Command:

 def move_file (self, src, dst, level=1):
 """Move a file respecting verbose and dry-run flags."""
-return file_util.move_file (src, dst,
+return file_util.move_file(src, dst,
 self.verbose >= level,
 self.dry_run)


 def spawn (self, cmd, search_path=1, level=1):
 """Spawn an external command respecting verbose and dry-run flags."""
 from distutils.spawn import spawn
-spawn (cmd, search_path,
+spawn(cmd, search_path,
 self.verbose >= level,
 self.dry_run)


 def make_archive (self, base_name, format,
@@ -421,15 +421,15 @@ class Command:
 """
 if exec_msg is None:
 exec_msg = "generating %s from %s" % \
-(outfile, string.join (infiles, ', '))
+(outfile, string.join(infiles, ', '))
 if skip_msg is None:
 skip_msg = "skipping %s (inputs unchanged)" % outfile


 # Allow 'infiles' to be a single string
-if type (infiles) is StringType:
+if type(infiles) is StringType:
 infiles = (infiles,)
-elif type (infiles) not in (ListType, TupleType):
+elif type(infiles) not in (ListType, TupleType):
 raise TypeError, \
 "'infiles' must be a string, or a list or tuple of strings"

@@ -437,11 +437,11 @@ class Command:
 # exist, is out-of-date, or the 'force' flag is true) then
 # perform the action that presumably regenerates it
 if self.force or dep_util.newer_group (infiles, outfile):
-self.execute (func, args, exec_msg, level)
+self.execute(func, args, exec_msg, level)

 # Otherwise, print the "skip" message
 else:
-self.announce (skip_msg, level)
+self.announce(skip_msg, level)

 # make_file ()

@@ -40,21 +40,21 @@ def mkpath (name, mode=0777, verbose=0, dry_run=0):
 # the creation of the whole path? (quite easy to do the latter since
 # we're not using a recursive algorithm)

-name = os.path.normpath (name)
+name = os.path.normpath(name)
 created_dirs = []
-if os.path.isdir (name) or name == '':
+if os.path.isdir(name) or name == '':
 return created_dirs
-if _path_created.get (name):
+if _path_created.get(name):
 return created_dirs

-(head, tail) = os.path.split (name)
+(head, tail) = os.path.split(name)
 tails = [tail] # stack of lone dirs to create

-while head and tail and not os.path.isdir (head):
+while head and tail and not os.path.isdir(head):
 #print "splitting '%s': " % head,
-(head, tail) = os.path.split (head)
+(head, tail) = os.path.split(head)
 #print "to ('%s','%s')" % (head, tail)
-tails.insert (0, tail) # push next higher dir onto stack
+tails.insert(0, tail) # push next higher dir onto stack

 #print "stack of tails:", tails

@@ -63,8 +63,8 @@ def mkpath (name, mode=0777, verbose=0, dry_run=0):
 # that does *not* exist)
 for d in tails:
 #print "head = %s, d = %s: " % (head, d),
-head = os.path.join (head, d)
-if _path_created.get (head):
+head = os.path.join(head, d)
+if _path_created.get(head):
 continue

 if verbose:
@@ -72,7 +72,7 @@ def mkpath (name, mode=0777, verbose=0, dry_run=0):

 if not dry_run:
 try:
-os.mkdir (head)
+os.mkdir(head)
 created_dirs.append(head)
 except OSError, exc:
 raise DistutilsFileError, \
@@ -97,13 +97,13 @@ def create_tree (base_dir, files, mode=0777, verbose=0, dry_run=0):
 # First get the list of directories to create
 need_dir = {}
 for file in files:
-need_dir[os.path.join (base_dir, os.path.dirname (file))] = 1
+need_dir[os.path.join(base_dir, os.path.dirname(file))] = 1
 need_dirs = need_dir.keys()
 need_dirs.sort()

 # Now create them
 for dir in need_dirs:
-mkpath (dir, mode, verbose, dry_run)
+mkpath(dir, mode, verbose, dry_run)

 # create_tree ()

@@ -136,11 +136,11 @@ def copy_tree (src, dst,

 from distutils.file_util import copy_file

-if not dry_run and not os.path.isdir (src):
+if not dry_run and not os.path.isdir(src):
 raise DistutilsFileError, \
 "cannot copy tree '%s': not a directory" % src
 try:
-names = os.listdir (src)
+names = os.listdir(src)
 except os.error, (errno, errstr):
 if dry_run:
 names = []
@@ -149,32 +149,32 @@ def copy_tree (src, dst,
 "error listing files in '%s': %s" % (src, errstr)

 if not dry_run:
-mkpath (dst, verbose=verbose)
+mkpath(dst, verbose=verbose)

 outputs = []

 for n in names:
-src_name = os.path.join (src, n)
-dst_name = os.path.join (dst, n)
+src_name = os.path.join(src, n)
+dst_name = os.path.join(dst, n)

-if preserve_symlinks and os.path.islink (src_name):
-link_dest = os.readlink (src_name)
+if preserve_symlinks and os.path.islink(src_name):
+link_dest = os.readlink(src_name)
 if verbose:
 print "linking %s -> %s" % (dst_name, link_dest)
 if not dry_run:
-os.symlink (link_dest, dst_name)
-outputs.append (dst_name)
+os.symlink(link_dest, dst_name)
+outputs.append(dst_name)

-elif os.path.isdir (src_name):
-outputs.extend (
-copy_tree (src_name, dst_name,
+elif os.path.isdir(src_name):
+outputs.extend(
+copy_tree(src_name, dst_name,
 preserve_mode, preserve_times, preserve_symlinks,
 update, verbose, dry_run))
 else:
-copy_file (src_name, dst_name,
+copy_file(src_name, dst_name,
 preserve_mode, preserve_times,
 update, None, verbose, dry_run)
-outputs.append (dst_name)
+outputs.append(dst_name)

 return outputs

@@ -22,14 +22,14 @@ from distutils.errors import *
 # utilities, we use '-' in place of '_'. (The spirit of LISP lives on!)
 # The similarities to NAME are again not a coincidence...
 longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
-longopt_re = re.compile (r'^%s$' % longopt_pat)
+longopt_re = re.compile(r'^%s$' % longopt_pat)

 # For recognizing "negative alias" options, eg. "quiet=!verbose"
-neg_alias_re = re.compile ("^(%s)=!(%s)$" % (longopt_pat, longopt_pat))
+neg_alias_re = re.compile("^(%s)=!(%s)$" % (longopt_pat, longopt_pat))

 # This is used to translate long options to legitimate Python identifiers
 # (for use as attributes of some object).
-longopt_xlate = string.maketrans ('-', '_')
+longopt_xlate = string.maketrans('-', '_')

 # This records (option, value) pairs in the order seen on the command line;
 # it's close to what getopt.getopt() returns, but with short options
@@ -107,7 +107,7 @@ class FancyGetopt:
 "option conflict: already an option '%s'" % long_option
 else:
 option = (long_option, short_option, help_string)
-self.option_table.append (option)
+self.option_table.append(option)
 self.option_index[long_option] = option


@@ -120,7 +120,7 @@ class FancyGetopt:
 """Translate long option name 'long_option' to the form it
 has as an attribute of some object: ie., translate hyphens
 to underscores."""
-return string.translate (long_option, longopt_xlate)
+return string.translate(long_option, longopt_xlate)


 def _check_alias_dict (self, aliases, what):
@@ -137,7 +137,7 @@ class FancyGetopt:

 def set_aliases (self, alias):
 """Set the aliases for this option parser."""
-self._check_alias_dict (alias, "alias")
+self._check_alias_dict(alias, "alias")
 self.alias = alias

 def set_negative_aliases (self, negative_alias):
@@ -145,15 +145,15 @@ class FancyGetopt:
 'negative_alias' should be a dictionary mapping option names to
 option names, both the key and value must already be defined
 in the option table."""
-self._check_alias_dict (negative_alias, "negative alias")
+self._check_alias_dict(negative_alias, "negative alias")
 self.negative_alias = negative_alias


 def _grok_option_table (self):
-"""Populate the various data structures that keep tabs on
-the option table. Called by 'getopt()' before it can do
-anything worthwhile."""
-
+"""Populate the various data structures that keep tabs on the
+option table. Called by 'getopt()' before it can do anything
+worthwhile.
+"""
 self.long_opts = []
 self.short_opts = []
 self.short2long.clear()
@@ -163,7 +163,7 @@ class FancyGetopt:
 (long, short, help) = option
 except ValueError:
 raise DistutilsGetoptError, \
-"invalid option tuple " + str (option)
+"invalid option tuple " + str(option)

 # Type- and value-check the option names
 if type(long) is not StringType or len(long) < 2:
@@ -172,12 +172,12 @@ class FancyGetopt:
 "must be a string of length >= 2") % long

 if (not ((short is None) or
-(type (short) is StringType and len (short) == 1))):
+(type(short) is StringType and len(short) == 1))):
 raise DistutilsGetoptError, \
 ("invalid short option '%s': "
 "must a single character or None") % short

-self.long_opts.append (long)
+self.long_opts.append(long)

 if long[-1] == '=': # option takes an argument?
 if short: short = short + ':'
@@ -216,14 +216,14 @@ class FancyGetopt:
 # later translate it to an attribute name on some object. Have
 # to do this a bit late to make sure we've removed any trailing
 # '='.
-if not longopt_re.match (long):
+if not longopt_re.match(long):
 raise DistutilsGetoptError, \
 ("invalid long option name '%s' " +
 "(must be letters, numbers, hyphens only") % long

-self.attr_name[long] = self.get_attr_name (long)
+self.attr_name[long] = self.get_attr_name(long)
 if short:
-self.short_opts.append (short)
+self.short_opts.append(short)
 self.short2long[short[0]] = long

 # for option_table
@@ -239,8 +239,8 @@ class FancyGetopt:
 (args, object). If 'object' is supplied, it is modified in place
 and 'getopt()' just returns 'args'; in both cases, the returned
 'args' is a modified copy of the passed-in 'args' list, which is
-left untouched."""
-
+left untouched.
+"""
 if args is None:
 args = sys.argv[1:]
 if object is None:
@@ -251,17 +251,17 @@ class FancyGetopt:

 self._grok_option_table()

-short_opts = string.join (self.short_opts)
+short_opts = string.join(self.short_opts)
 try:
-(opts, args) = getopt.getopt (args, short_opts, self.long_opts)
+(opts, args) = getopt.getopt(args, short_opts, self.long_opts)
 except getopt.error, msg:
 raise DistutilsArgError, msg

 for (opt, val) in opts:
-if len (opt) == 2 and opt[0] == '-': # it's a short option
+if len(opt) == 2 and opt[0] == '-': # it's a short option
 opt = self.short2long[opt[1]]

-elif len (opt) > 2 and opt[0:2] == '--':
+elif len(opt) > 2 and opt[0:2] == '--':
 opt = opt[2:]

 else:
@@ -277,7 +277,7 @@ class FancyGetopt:
 raise DistutilsInternalError, \
 "this can't happen: bad option value '%s'" % value

-alias = self.negative_alias.get (opt)
+alias = self.negative_alias.get(opt)
 if alias:
 opt = alias
 val = 0
@@ -285,8 +285,8 @@ class FancyGetopt:
 val = 1

 attr = self.attr_name[opt]
-setattr (object, attr, val)
-self.option_order.append ((opt, val))
+setattr(object, attr, val)
+self.option_order.append((opt, val))

 # for opts

@@ -301,8 +301,8 @@ class FancyGetopt:
 def get_option_order (self):
 """Returns the list of (option, value) tuples processed by the
 previous run of 'getopt()'. Raises RuntimeError if
-'getopt()' hasn't been called yet."""
-
+'getopt()' hasn't been called yet.
+"""
 if self.option_order is None:
 raise RuntimeError, "'getopt()' hasn't been called yet"
 else:
@@ -311,8 +311,8 @@ class FancyGetopt:

 def generate_help (self, header=None):
 """Generate help text (a list of strings, one per suggested line of
-output) from the option table for this FancyGetopt object."""
-
+output) from the option table for this FancyGetopt object.
+"""
 # Blithely assume the option table is good: probably wouldn't call
 # 'generate_help()' unless you've already called 'getopt()'.

@@ -321,7 +321,7 @@ class FancyGetopt:
 for option in self.option_table:
 long = option[0]
 short = option[1]
-l = len (long)
+l = len(long)
 if long[-1] == '=':
 l = l - 1
 if short is not None:
@@ -363,29 +363,29 @@ class FancyGetopt:

 for (long,short,help) in self.option_table:

-text = wrap_text (help, text_width)
+text = wrap_text(help, text_width)
 if long[-1] == '=':
 long = long[0:-1]

 # Case 1: no short option at all (makes life easy)
 if short is None:
 if text:
-lines.append (" --%-*s %s" % (max_opt, long, text[0]))
+lines.append(" --%-*s %s" % (max_opt, long, text[0]))
 else:
-lines.append (" --%-*s " % (max_opt, long))
+lines.append(" --%-*s " % (max_opt, long))

 # Case 2: we have a short option, so we have to include it
 # just after the long option
 else:
 opt_names = "%s (-%s)" % (long, short)
 if text:
-lines.append (" --%-*s %s" %
+lines.append(" --%-*s %s" %
 (max_opt, opt_names, text[0]))
 else:
-lines.append (" --%-*s" % opt_names)
+lines.append(" --%-*s" % opt_names)

 for l in text[1:]:
-lines.append (big_indent + l)
+lines.append(big_indent + l)

 # for self.option_table

@@ -396,20 +396,19 @@ class FancyGetopt:
 def print_help (self, header=None, file=None):
 if file is None:
 file = sys.stdout
-for line in self.generate_help (header):
-file.write (line + "\n")
-# print_help ()
+for line in self.generate_help(header):
+file.write(line + "\n")

 # class FancyGetopt


 def fancy_getopt (options, negative_opt, object, args):
-parser = FancyGetopt (options)
-parser.set_negative_aliases (negative_opt)
-return parser.getopt (args, object)
+parser = FancyGetopt(options)
+parser.set_negative_aliases(negative_opt)
+return parser.getopt(args, object)


-WS_TRANS = string.maketrans (string.whitespace, ' ' * len (string.whitespace))
+WS_TRANS = string.maketrans(string.whitespace, ' ' * len(string.whitespace))

 def wrap_text (text, width):
 """wrap_text(text : string, width : int) -> [string]
@@ -420,13 +419,13 @@ def wrap_text (text, width):

 if text is None:
 return []
-if len (text) <= width:
+if len(text) <= width:
 return [text]

-text = string.expandtabs (text)
-text = string.translate (text, WS_TRANS)
-chunks = re.split (r'( +|-+)', text)
-chunks = filter (None, chunks) # ' - ' results in empty strings
+text = string.expandtabs(text)
+text = string.translate(text, WS_TRANS)
+chunks = re.split(r'( +|-+)', text)
+chunks = filter(None, chunks) # ' - ' results in empty strings
 lines = []

 while chunks:
@@ -435,9 +434,9 @@ def wrap_text (text, width):
 cur_len = 0 # length of current line

 while chunks:
-l = len (chunks[0])
+l = len(chunks[0])
 if cur_len + l <= width: # can squeeze (at least) this chunk in
-cur_line.append (chunks[0])
+cur_line.append(chunks[0])
 del chunks[0]
 cur_len = cur_len + l
 else: # this line is full
@@ -452,7 +451,7 @@ def wrap_text (text, width):
 # chunk that's too big too fit on a line -- so we break
 # down and break it up at the line width
 if cur_len == 0:
-cur_line.append (chunks[0][0:width])
+cur_line.append(chunks[0][0:width])
 chunks[0] = chunks[0][width:]

 # all-whitespace chunks at the end of a line can be discarded
@@ -463,7 +462,7 @@ def wrap_text (text, width):

 # and store this line in the list-of-all-lines -- as a single
 # string, of course!
-lines.append (string.join (cur_line, ''))
+lines.append(string.join(cur_line, ''))

 # while chunks

@@ -501,5 +500,5 @@ say, "How should I know?"].)"""

 for w in (10, 20, 30, 40):
 print "width: %d" % w
-print string.join (wrap_text (text, w), "\n")
+print string.join(wrap_text(text, w), "\n")
 print
@@ -55,7 +55,7 @@ class FileList:
 # -- Fallback warning/debug functions ------------------------------

 def __warn (self, msg):
-sys.stderr.write ("warning: %s\n" % msg)
+sys.stderr.write("warning: %s\n" % msg)

 def __debug_print (self, msg):
 """Print 'msg' to stdout if the global DEBUG (taken from the
@@ -87,7 +87,7 @@ class FileList:

 def remove_duplicates (self):
 # Assumes list has been sorted!
-for i in range (len(self.files)-1, 0, -1):
+for i in range(len(self.files)-1, 0, -1):
 if self.files[i] == self.files[i-1]:
 del self.files[i]

@@ -95,21 +95,21 @@ class FileList:
 # -- "File template" methods ---------------------------------------

 def _parse_template_line (self, line):
-words = string.split (line)
+words = string.split(line)
 action = words[0]

 patterns = dir = dir_pattern = None

 if action in ('include', 'exclude',
 'global-include', 'global-exclude'):
-if len (words) < 2:
+if len(words) < 2:
 raise DistutilsTemplateError, \
 "'%s' expects <pattern1> <pattern2> ..." % action

 patterns = map(convert_path, words[1:])

 elif action in ('recursive-include', 'recursive-exclude'):
-if len (words) < 3:
+if len(words) < 3:
 raise DistutilsTemplateError, \
 "'%s' expects <dir> <pattern1> <pattern2> ..." % action

@@ -117,7 +117,7 @@ class FileList:
 patterns = map(convert_path, words[2:])

 elif action in ('graft', 'prune'):
-if len (words) != 2:
+if len(words) != 2:
 raise DistutilsTemplateError, \
 "'%s' expects a single <dir_pattern>" % action

@@ -146,13 +146,13 @@ class FileList:
 if action == 'include':
 self.debug_print("include " + string.join(patterns))
 for pattern in patterns:
-if not self.include_pattern (pattern, anchor=1):
+if not self.include_pattern(pattern, anchor=1):
 self.warn("no files found matching '%s'" % pattern)

 elif action == 'exclude':
 self.debug_print("exclude " + string.join(patterns))
 for pattern in patterns:
-if not self.exclude_pattern (pattern, anchor=1):
+if not self.exclude_pattern(pattern, anchor=1):
 self.warn(
 "no previously-included files found matching '%s'"%
 pattern)
@@ -160,15 +160,15 @@ class FileList:
 elif action == 'global-include':
 self.debug_print("global-include " + string.join(patterns))
 for pattern in patterns:
-if not self.include_pattern (pattern, anchor=0):
-self.warn (("no files found matching '%s' " +
+if not self.include_pattern(pattern, anchor=0):
+self.warn(("no files found matching '%s' " +
 "anywhere in distribution") %
 pattern)

 elif action == 'global-exclude':
 self.debug_print("global-exclude " + string.join(patterns))
 for pattern in patterns:
-if not self.exclude_pattern (pattern, anchor=0):
+if not self.exclude_pattern(pattern, anchor=0):
 self.warn(("no previously-included files matching '%s' " +
 "found anywhere in distribution") %
 pattern)
@@ -177,8 +177,8 @@ class FileList:
 self.debug_print("recursive-include %s %s" %
 (dir, string.join(patterns)))
 for pattern in patterns:
-if not self.include_pattern (pattern, prefix=dir):
-self.warn (("no files found matching '%s' " +
+if not self.include_pattern(pattern, prefix=dir):
+self.warn(("no files found matching '%s' " +
 "under directory '%s'") %
 (pattern, dir))

@@ -190,11 +190,11 @@ class FileList:
 self.warn(("no previously-included files matching '%s' " +
 "found under directory '%s'") %
 (pattern, dir))

 elif action == 'graft':
 self.debug_print("graft " + dir_pattern)
 if not self.include_pattern(None, prefix=dir_pattern):
-self.warn ("no directories found matching '%s'" % dir_pattern)
+self.warn("no directories found matching '%s'" % dir_pattern)

 elif action == 'prune':
 self.debug_print("prune " + dir_pattern)
@@ -212,8 +212,7 @@ class FileList:
 # -- Filtering/selection methods -----------------------------------

 def include_pattern (self, pattern,
 anchor=1, prefix=None, is_regex=0):
-
 """Select strings (presumably filenames) from 'self.files' that
 match 'pattern', a Unix-style wildcard (glob) pattern. Patterns
 are not quite the same as implemented by the 'fnmatch' module: '*'
@@ -239,7 +238,7 @@ class FileList:
 Return 1 if files are found.
 """
 files_found = 0
-pattern_re = translate_pattern (pattern, anchor, prefix, is_regex)
+pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
 self.debug_print("include_pattern: applying regex r'%s'" %
 pattern_re.pattern)

@@ -248,9 +247,9 @@ class FileList:
 self.findall()

 for name in self.allfiles:
-if pattern_re.search (name):
+if pattern_re.search(name):
 self.debug_print(" adding " + name)
-self.files.append (name)
+self.files.append(name)
 files_found = 1

 return files_found
@@ -267,11 +266,11 @@ class FileList:
 Return 1 if files are found.
 """
 files_found = 0
-pattern_re = translate_pattern (pattern, anchor, prefix, is_regex)
+pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
 self.debug_print("exclude_pattern: applying regex r'%s'" %
 pattern_re.pattern)
-for i in range (len(self.files)-1, -1, -1):
-if pattern_re.search (self.files[i]):
+for i in range(len(self.files)-1, -1, -1):
+if pattern_re.search(self.files[i]):
 self.debug_print(" removing " + self.files[i])
 del self.files[i]
 files_found = 1
@@ -299,11 +298,11 @@ def findall (dir = os.curdir):

 while stack:
 dir = pop()
-names = os.listdir (dir)
+names = os.listdir(dir)

 for name in names:
 if dir != os.curdir: # avoid the dreaded "./" syndrome
-fullname = os.path.join (dir, name)
+fullname = os.path.join(dir, name)
 else:
 fullname = name

@@ -311,9 +310,9 @@ def findall (dir = os.curdir):
 stat = os.stat(fullname)
 mode = stat[ST_MODE]
 if S_ISREG(mode):
-list.append (fullname)
+list.append(fullname)
 elif S_ISDIR(mode) and not S_ISLNK(mode):
-push (fullname)
+push(fullname)

 return list

@@ -324,7 +323,7 @@ def glob_to_re (pattern):
 that '*' does not match "special characters" (which are
 platform-specific).
 """
-pattern_re = fnmatch.translate (pattern)
+pattern_re = fnmatch.translate(pattern)

 # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
 # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
@@ -333,7 +332,7 @@ def glob_to_re (pattern):
 # character except the special characters.
 # XXX currently the "special characters" are just slash -- i.e. this is
 # Unix-only.
-pattern_re = re.sub (r'(^|[^\\])\.', r'\1[^/]', pattern_re)
+pattern_re = re.sub(r'(^|[^\\])\.', r'\1[^/]', pattern_re)
 return pattern_re

 # glob_to_re ()
@@ -352,17 +351,17 @@ def translate_pattern (pattern, anchor=1, prefix=None, is_regex=0):
 return pattern

 if pattern:
-pattern_re = glob_to_re (pattern)
+pattern_re = glob_to_re(pattern)
 else:
 pattern_re = ''

 if prefix is not None:
-prefix_re = (glob_to_re (prefix))[0:-1] # ditch trailing $
-pattern_re = "^" + os.path.join (prefix_re, ".*" + pattern_re)
+prefix_re = (glob_to_re(prefix))[0:-1] # ditch trailing $
+pattern_re = "^" + os.path.join(prefix_re, ".*" + pattern_re)
 else: # no prefix -- respect anchor flag
 if anchor:
 pattern_re = "^" + pattern_re

-return re.compile (pattern_re)
+return re.compile(pattern_re)

 # translate_pattern ()