Merged revisions 64286 via svnmerge from

svn+ssh://pythondev@svn.python.org/python/trunk

................
  r64286 | benjamin.peterson | 2008-06-14 21:31:05 -0500 (Sat, 14 Jun 2008) | 49 lines

  Merged revisions 63661,63666,63695,63711,63729,63769,63790,63880,63886 via svnmerge from
  svn+ssh://pythondev@svn.python.org/sandbox/trunk/2to3/lib2to3

  ........
    r63661 | georg.brandl | 2008-05-26 05:26:20 -0500 (Mon, 26 May 2008) | 2 lines

    Add import fixes for dbm package.
  ........
    r63666 | georg.brandl | 2008-05-26 05:49:09 -0500 (Mon, 26 May 2008) | 2 lines

    Add xmlrpc package fixes.
  ........
    r63695 | georg.brandl | 2008-05-26 10:14:33 -0500 (Mon, 26 May 2008) | 2 lines

    Add fixer entries for http package.
  ........
    r63711 | benjamin.peterson | 2008-05-26 13:43:51 -0500 (Mon, 26 May 2008) | 2 lines

    add import mapping for test.test_support -> test.support
  ........
    r63729 | benjamin.peterson | 2008-05-26 16:31:03 -0500 (Mon, 26 May 2008) | 2 lines

    mapping for commands module -> subprocess
  ........
    r63769 | brett.cannon | 2008-05-29 00:13:13 -0500 (Thu, 29 May 2008) | 1 line

    Fixer for UserString.UserString over to the collections module.
  ........
    r63790 | brett.cannon | 2008-05-29 14:13:51 -0500 (Thu, 29 May 2008) | 4 lines

    Add a fixer for UserList.

    Closes issue #2878. Thanks to Quentin Gallet-Gilles for the patch.
  ........
    r63880 | collin.winter | 2008-06-01 18:09:38 -0500 (Sun, 01 Jun 2008) | 6 lines

    Move lib2to3/fixes/{basefix,util}.py down to lib2to3/.

    This is step 1 of turning lib2to3/ into a general-purpose refactoring
    library, reusable by other projects.
  ........
    r63886 | collin.winter | 2008-06-01 22:15:01 -0500 (Sun, 01 Jun 2008) | 5 lines

    Allow refactoring tools to specify a directory for fixer modules.

    This is step 2 of turning lib2to3/ into a general-purpose refactoring
    library, reusable by other projects. Step 1: r63880.
  ........
................
This commit is contained in:
Benjamin Peterson 2008-06-15 02:57:40 +00:00
parent 979f311728
commit df6dc8f107
46 changed files with 277 additions and 158 deletions
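
Most of the 46 files below are touched by r63880 alone: lib2to3/fixes/basefix.py moves to lib2to3/fixer_base.py and lib2to3/fixes/util.py moves to lib2to3/fixer_util.py, so each fixer module swaps a pair of relative imports and its base-class reference. A sketch of the pattern repeated in the hunks that follow (fix_example.py and FixExample are placeholder names, not files in this commit):

    # Old layout (the lines being removed below):
    from . import basefix
    from .util import Name

    class FixExample(basefix.BaseFix):
        PATTERN = "'example'"

    # New layout (the lines being added below):
    from .. import fixer_base
    from ..fixer_util import Name

    class FixExample(fixer_base.BaseFix):
        PATTERN = "'example'"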

View file

@ -1,188 +0,0 @@
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Base class for fixers (optional, but recommended)."""

# Python imports
import logging
import itertools

# Get a usable 'set' constructor
try:
    set
except NameError:
    from sets import Set as set

# Local imports
from ..patcomp import PatternCompiler
from .. import pygram
from .util import does_tree_import


class BaseFix(object):

    """Optional base class for fixers.

    The subclass name must be FixFooBar where FooBar is the result of
    removing underscores and capitalizing the words of the fix name.
    For example, the class name for a fixer named 'has_key' should be
    FixHasKey.
    """

    PATTERN = None   # Most subclasses should override with a string literal
    pattern = None   # Compiled pattern, set by compile_pattern()
    options = None   # Options object passed to initializer
    filename = None  # The filename (set by set_filename)
    logger = None    # A logger (set by set_filename)
    numbers = itertools.count(1)  # For new_name()
    used_names = set()  # A set of all used NAMEs
    order = "post"   # Does the fixer prefer pre- or post-order traversal
    explicit = False # Is this ignored by refactor.py -f all?
    run_order = 5    # Fixers will be sorted by run order before execution
                     # Lower numbers will be run first.

    # Shortcut for access to Python grammar symbols
    syms = pygram.python_symbols

    def __init__(self, options, log):
        """Initializer.  Subclass may override.

        Args:
            options: an optparse.Values instance which can be used
                to inspect the command line options.
            log: a list to append warnings and other messages to.
        """
        self.options = options
        self.log = log
        self.compile_pattern()

    def compile_pattern(self):
        """Compiles self.PATTERN into self.pattern.

        Subclass may override if it doesn't want to use
        self.{pattern,PATTERN} in .match().
        """
        if self.PATTERN is not None:
            self.pattern = PatternCompiler().compile_pattern(self.PATTERN)

    def set_filename(self, filename):
        """Set the filename, and a logger derived from it.

        The main refactoring tool should call this.
        """
        self.filename = filename
        self.logger = logging.getLogger(filename)

    def match(self, node):
        """Returns match for a given parse tree node.

        Should return a true or false object (not necessarily a bool).
        It may return a non-empty dict of matching sub-nodes as
        returned by a matching pattern.

        Subclass may override.
        """
        results = {"node": node}
        return self.pattern.match(node, results) and results

    def transform(self, node, results):
        """Returns the transformation for a given parse tree node.

        Args:
            node: the root of the parse tree that matched the fixer.
            results: a dict mapping symbolic names to part of the match.

        Returns:
            None, or a node that is a modified copy of the
            argument node.  The node argument may also be modified in-place to
            effect the same change.

        Subclass *must* override.
        """
        raise NotImplementedError()

    def parenthesize(self, node):
        """Wrapper around pygram.parenthesize()."""
        return pygram.parenthesize(node)

    def new_name(self, template="xxx_todo_changeme"):
        """Return a string suitable for use as an identifier

        The new name is guaranteed not to conflict with other identifiers.
        """
        name = template
        while name in self.used_names:
            name = template + str(next(self.numbers))
        self.used_names.add(name)
        return name

    def log_message(self, message):
        if self.first_log:
            self.first_log = False
            self.log.append("### In file %s ###" % self.filename)
        self.log.append(message)

    def cannot_convert(self, node, reason=None):
        """Warn the user that a given chunk of code is not valid Python 3,
        but that it cannot be converted automatically.

        First argument is the top-level node for the code in question.
        Optional second argument is why it can't be converted.
        """
        lineno = node.get_lineno()
        for_output = node.clone()
        for_output.set_prefix("")
        msg = "Line %d: could not convert: %s"
        self.log_message(msg % (lineno, for_output))
        if reason:
            self.log_message(reason)

    def warning(self, node, reason):
        """Used for warning the user about possible uncertainty in the
        translation.

        First argument is the top-level node for the code in question.
        Optional second argument is why it can't be converted.
        """
        lineno = node.get_lineno()
        self.log_message("Line %d: %s" % (lineno, reason))

    def start_tree(self, tree, filename):
        """Some fixers need to maintain tree-wide state.
        This method is called once, at the start of tree fix-up.

        tree - the root node of the tree to be processed.
        filename - the name of the file the tree came from.
        """
        self.used_names = tree.used_names
        self.set_filename(filename)
        self.numbers = itertools.count(1)
        self.first_log = True

    def finish_tree(self, tree, filename):
        """Some fixers need to maintain tree-wide state.
        This method is called once, at the conclusion of tree fix-up.

        tree - the root node of the tree to be processed.
        filename - the name of the file the tree came from.
        """
        pass

class ConditionalFix(BaseFix):
    """ Base class for fixers which don't execute if an import is found. """

    # This is the name of the import which, if found, will cause the fixer
    # to be skipped.
    skip_on = None

    def start_tree(self, *args):
        super(ConditionalFix, self).start_tree(*args)
        self._should_skip = None

    def should_skip(self, node):
        if self._should_skip is not None:
            return self._should_skip
        pkg = self.skip_on.split(".")
        name = pkg[-1]
        pkg = ".".join(pkg[:-1])
        self._should_skip = does_tree_import(pkg, name, node)
        return self._should_skip
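
The file above is the whole fixer API: subclasses set PATTERN, override transform(), and optionally derive from ConditionalFix to skip work when a given import is already present. As a reference point, a complete fixer in the new layout is only a few lines; this sketch is modeled on the basestring fixer whose hunk appears further down, not copied verbatim from it:

    from .. import fixer_base
    from ..fixer_util import Name

    class FixBasestring(fixer_base.BaseFix):
        # Match any occurrence of the bare name 'basestring'.
        PATTERN = "'basestring'"

        def transform(self, node, results):
            # Replace it with 'str', keeping the original whitespace prefix.
            return Name("str", prefix=node.get_prefix())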

View file

@ -8,10 +8,10 @@ This converts apply(func, v, k) into (func)(*v, **k)."""
# Local imports
from .. import pytree
from ..pgen2 import token
from . import basefix
from .util import Call, Comma
from .. import fixer_base
from ..fixer_util import Call, Comma
class FixApply(basefix.BaseFix):
class FixApply(fixer_base.BaseFix):
PATTERN = """
power< 'apply'

View file

@ -2,10 +2,10 @@
# Author: Christian Heimes
# Local imports
from . import basefix
from .util import Name
from .. import fixer_base
from ..fixer_util import Name
class FixBasestring(basefix.BaseFix):
class FixBasestring(fixer_base.BaseFix):
PATTERN = "'basestring'"

View file

@ -4,11 +4,11 @@
"""Fixer that changes buffer(...) into memoryview(...)."""
# Local imports
from . import basefix
from .util import Name
from .. import fixer_base
from ..fixer_util import Name
class FixBuffer(basefix.BaseFix):
class FixBuffer(fixer_base.BaseFix):
explicit = True # The user must ask for this fixer

View file

@ -7,10 +7,10 @@ This converts callable(obj) into hasattr(obj, '__call__')."""
# Local imports
from .. import pytree
from . import basefix
from .util import Call, Name, String
from .. import fixer_base
from ..fixer_util import Call, Name, String
class FixCallable(basefix.BaseFix):
class FixCallable(fixer_base.BaseFix):
# Ignore callable(*args) or use of keywords.
# Either could be a hint that the builtin callable() is not being used.

View file

@ -27,15 +27,15 @@ as an argument to a function that introspects the argument).
from .. import pytree
from .. import patcomp
from ..pgen2 import token
from . import basefix
from .util import Name, Call, LParen, RParen, ArgList, Dot, set
from . import util
from .. import fixer_base
from ..fixer_util import Name, Call, LParen, RParen, ArgList, Dot, set
from .. import fixer_util
iter_exempt = util.consuming_calls | set(["iter"])
iter_exempt = fixer_util.consuming_calls | set(["iter"])
class FixDict(basefix.BaseFix):
class FixDict(fixer_base.BaseFix):
PATTERN = """
power< head=any+
trailer< '.' method=('keys'|'items'|'values'|
@ -92,7 +92,7 @@ class FixDict(basefix.BaseFix):
return results["func"].value in iter_exempt
else:
# list(d.keys()) -> list(d.keys()), etc.
return results["func"].value in util.consuming_calls
return results["func"].value in fixer_util.consuming_calls
if not isiter:
return False
# for ... in d.iterkeys() -> for ... in d.keys(), etc.

View file

@ -24,8 +24,8 @@ The following cases will be converted:
# Local imports
from .. import pytree
from ..pgen2 import token
from . import basefix
from .util import Assign, Attr, Name, is_tuple, is_list, reversed
from .. import fixer_base
from ..fixer_util import Assign, Attr, Name, is_tuple, is_list, reversed
def find_excepts(nodes):
for i, n in enumerate(nodes):
@ -33,7 +33,7 @@ def find_excepts(nodes):
if n.children[0].value == 'except':
yield (n, nodes[i+2])
class FixExcept(basefix.BaseFix):
class FixExcept(fixer_base.BaseFix):
PATTERN = """
try_stmt< 'try' ':' suite

View file

@ -11,11 +11,11 @@ exec code in ns1, ns2 -> exec(code, ns1, ns2)
# Local imports
from .. import pytree
from . import basefix
from .util import Comma, Name, Call
from .. import fixer_base
from ..fixer_util import Comma, Name, Call
class FixExec(basefix.BaseFix):
class FixExec(fixer_base.BaseFix):
PATTERN = """
exec_stmt< 'exec' a=any 'in' b=any [',' c=any] >

View file

@ -8,11 +8,11 @@ exec() function.
"""
from .. import pytree
from . import basefix
from .util import Comma, Name, Call, LParen, RParen, Dot
from .. import fixer_base
from ..fixer_util import Comma, Name, Call, LParen, RParen, Dot
class FixExecfile(basefix.BaseFix):
class FixExecfile(fixer_base.BaseFix):
PATTERN = """
power< 'execfile' trailer< '(' arglist< filename=any [',' globals=any [',' locals=any ] ] > ')' > >

View file

@ -15,10 +15,10 @@ Python 2.6 figure it out.
# Local imports
from ..pgen2 import token
from . import basefix
from .util import Name, Call, ListComp, in_special_context
from .. import fixer_base
from ..fixer_util import Name, Call, ListComp, in_special_context
class FixFilter(basefix.ConditionalFix):
class FixFilter(fixer_base.ConditionalFix):
PATTERN = """
filter_lambda=power<

View file

@ -2,11 +2,11 @@
# Author: Collin Winter
# Local imports
from . import basefix
from .util import Name
from .. import fixer_base
from ..fixer_util import Name
class FixFuncattrs(basefix.BaseFix):
class FixFuncattrs(fixer_base.BaseFix):
PATTERN = """
power< any+ trailer< '.' attr=('func_closure' | 'func_doc' | 'func_globals'
| 'func_name' | 'func_defaults' | 'func_code'

View file

@ -5,10 +5,10 @@ from __future__ import foo is replaced with an empty line.
# Author: Christian Heimes
# Local imports
from . import basefix
from .util import BlankLine
from .. import fixer_base
from ..fixer_util import BlankLine
class FixFuture(basefix.BaseFix):
class FixFuture(fixer_base.BaseFix):
PATTERN = """import_from< 'from' module_name="__future__" 'import' any >"""
# This should be run last -- some things check for the import

View file

@ -32,11 +32,11 @@ CAVEATS:
# Local imports
from .. import pytree
from ..pgen2 import token
from . import basefix
from .util import Name
from .. import fixer_base
from ..fixer_util import Name
class FixHasKey(basefix.BaseFix):
class FixHasKey(fixer_base.BaseFix):
PATTERN = """
anchor=power<

View file

@ -28,13 +28,13 @@ into
# Author: Jacques Frechet, Collin Winter
# Local imports
from . import basefix
from .util import Call, Comma, Name, Node, syms
from .. import fixer_base
from ..fixer_util import Call, Comma, Name, Node, syms
CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)"
TYPE = "power< 'type' trailer< '(' x=any ')' > >"
class FixIdioms(basefix.BaseFix):
class FixIdioms(fixer_base.BaseFix):
explicit = True # The user must ask for this fixer

View file

@ -11,11 +11,11 @@ Becomes:
"""
# Local imports
from . import basefix
from .. import fixer_base
from os.path import dirname, join, exists, pathsep
from .util import FromImport
from ..fixer_util import FromImport
class FixImport(basefix.BaseFix):
class FixImport(fixer_base.BaseFix):
PATTERN = """
import_from< type='from' imp=any 'import' any >

View file

@ -8,8 +8,8 @@ Fixes:
# Author: Collin Winter
# Local imports
from . import basefix
from .util import Name, attr_chain, any, set
from .. import fixer_base
from ..fixer_util import Name, attr_chain, any, set
import builtins
builtin_names = [name for name in dir(builtins)
if name not in ("__name__", "__doc__")]
@ -150,6 +150,123 @@ MAPPING = {"StringIO": ("io", ["StringIO"]),
'error', 'exit', 'exit_thread', 'get_ident',
'interrupt_main', 'stack_size', 'start_new',
'start_new_thread']),
'whichdb': ('dbm', ['whichdb']),
'anydbm': ('dbm', ['error', 'open']),
'dbhash': ('dbm.bsd', ['error', 'open']),
'dumbdbm': ('dbm.dumb', ['error', 'open', '_Database']),
'dbm': ('dbm.ndbm', ['error', 'open', 'library']),
'gdbm': ('dbm.gnu', ['error', 'open', 'open_flags']),
'xmlrpclib': ('xmlrpc.client',
['Error', 'ProtocolError', 'ResponseError', 'Fault',
'ServerProxy', 'Boolean', 'DateTime', 'Binary',
'ExpatParser', 'FastMarshaller', 'FastParser',
'FastUnmarshaller', 'MultiCall', 'MultiCallIterator',
'SlowParser', 'Marshaller', 'Unmarshaller', 'Server',
'Transport', 'SafeTransport', 'SgmlopParser',
'boolean', 'getparser', 'dumps', 'loads', 'escape',
'PARSE_ERROR', 'SERVER_ERROR', 'WRAPPERS',
'APPLICATION_ERROR', 'SYSTEM_ERROR',
'TRANSPORT_ERROR', 'NOT_WELLFORMED_ERROR',
'UNSUPPORTED_ENCODING', 'INVALID_ENCODING_CHAR',
'INVALID_XMLRPC', 'METHOD_NOT_FOUND',
'INVALID_METHOD_PARAMS', 'INTERNAL_ERROR',
'MININT', 'MAXINT']),
'DocXMLRPCServer': ('xmlrpc.server',
['CGIXMLRPCRequestHandler',
'DocCGIXMLRPCRequestHandler',
'DocXMLRPCRequestHandler', 'DocXMLRPCServer',
'ServerHTMLDoc', 'SimpleXMLRPCRequestHandler',
'SimpleXMLRPCServer', 'XMLRPCDocGenerator',
'resolve_dotted_attribute']),
'SimpleXMLRPCServer': ('xmlrpc.server',
['CGIXMLRPCRequestHandler',
'Fault', 'SimpleXMLRPCDispatcher',
'SimpleXMLRPCRequestHandler',
'SimpleXMLRPCServer', 'SocketServer',
'list_public_methods',
'remove_duplicates',
'resolve_dotted_attribute']),
'httplib': ('http.client',
['ACCEPTED', 'BAD_GATEWAY', 'BAD_REQUEST',
'BadStatusLine', 'CONFLICT', 'CONTINUE', 'CREATED',
'CannotSendHeader', 'CannotSendRequest',
'EXPECTATION_FAILED', 'FAILED_DEPENDENCY', 'FORBIDDEN',
'FOUND', 'FakeSocket', 'GATEWAY_TIMEOUT', 'GONE',
'HTTP', 'HTTPConnection', 'HTTPException',
'HTTPMessage', 'HTTPResponse', 'HTTPS',
'HTTPSConnection', 'HTTPS_PORT', 'HTTP_PORT',
'HTTP_VERSION_NOT_SUPPORTED', 'IM_USED',
'INSUFFICIENT_STORAGE', 'INTERNAL_SERVER_ERROR',
'ImproperConnectionState', 'IncompleteRead',
'InvalidURL', 'LENGTH_REQUIRED', 'LOCKED',
'LineAndFileWrapper', 'MAXAMOUNT', 'METHOD_NOT_ALLOWED',
'MOVED_PERMANENTLY', 'MULTIPLE_CHOICES', 'MULTI_STATUS',
'NON_AUTHORITATIVE_INFORMATION', 'NOT_ACCEPTABLE',
'NOT_EXTENDED', 'NOT_FOUND', 'NOT_IMPLEMENTED',
'NOT_MODIFIED', 'NO_CONTENT', 'NotConnected', 'OK',
'PARTIAL_CONTENT', 'PAYMENT_REQUIRED',
'PRECONDITION_FAILED', 'PROCESSING',
'PROXY_AUTHENTICATION_REQUIRED',
'REQUESTED_RANGE_NOT_SATISFIABLE',
'REQUEST_ENTITY_TOO_LARGE', 'REQUEST_TIMEOUT',
'REQUEST_URI_TOO_LONG', 'RESET_CONTENT',
'ResponseNotReady', 'SEE_OTHER', 'SERVICE_UNAVAILABLE',
'SSLFile', 'SWITCHING_PROTOCOLS', 'SharedSocket',
'SharedSocketClient', 'StringIO', 'TEMPORARY_REDIRECT',
'UNAUTHORIZED', 'UNPROCESSABLE_ENTITY',
'UNSUPPORTED_MEDIA_TYPE', 'UPGRADE_REQUIRED',
'USE_PROXY', 'UnimplementedFileMode', 'UnknownProtocol',
'UnknownTransferEncoding', 'error', 'responses']),
'Cookie': ('http.cookies',
['BaseCookie', 'Cookie', 'CookieError', 'Morsel',
'SerialCookie', 'SimpleCookie', 'SmartCookie']),
'cookielib': ('http.cookiejar',
['Absent', 'Cookie', 'CookieJar', 'CookiePolicy',
'DAYS', 'DEFAULT_HTTP_PORT', 'DefaultCookiePolicy',
'EPOCH_YEAR', 'ESCAPED_CHAR_RE', 'FileCookieJar',
'HEADER_ESCAPE_RE', 'HEADER_JOIN_ESCAPE_RE',
'HEADER_QUOTED_VALUE_RE', 'HEADER_TOKEN_RE',
'HEADER_VALUE_RE', 'HTTP_PATH_SAFE', 'IPV4_RE',
'ISO_DATE_RE', 'LOOSE_HTTP_DATE_RE', 'LWPCookieJar',
'LoadError', 'MISSING_FILENAME_TEXT', 'MONTHS',
'MONTHS_LOWER', 'MozillaCookieJar', 'STRICT_DATE_RE',
'TIMEZONE_RE', 'UTC_ZONES', 'WEEKDAY_RE',
'cut_port_re', 'deepvalues', 'domain_match',
'eff_request_host', 'escape_path', 'http2time',
'is_HDN', 'is_third_party', 'iso2time',
'join_header_words', 'liberal_is_HDN', 'logger',
'lwp_cookie_str', 'month', 'offset_from_tz_string',
'parse_ns_headers', 'reach', 'request_host',
'request_path', 'request_port', 'split_header_words',
'time', 'time2isoz', 'time2netscape', 'unmatched',
'uppercase_escaped_char', 'urllib',
'user_domain_match', 'vals_sorted_by_key']),
'BaseHTTPServer': ('http.server',
['BaseHTTPRequestHandler',
'DEFAULT_ERROR_MESSAGE', 'HTTPServer']),
'SimpleHTTPServer': ('http.server', ['SimpleHTTPRequestHandler']),
'CGIHTTPServer': ('http.server',
['CGIHTTPRequestHandler', 'executable',
'nobody_uid', 'nobody']),
'test.test_support': ('test.support',
["Error", "TestFailed", "TestSkipped", "ResourceDenied",
"import_module", "verbose", "use_resources",
"max_memuse", "record_original_stdout",
"get_original_stdout", "unload", "unlink", "rmtree",
"forget", "is_resource_enabled", "requires",
"find_unused_port", "bind_port",
"fcmp", "is_jython", "TESTFN", "HOST",
"FUZZ", "findfile", "verify", "vereq", "sortdict",
"check_syntax_error", "open_urlresource", "WarningMessage",
"catch_warning", "CleanImport", "EnvironmentVarGuard",
"TransientResource", "captured_output", "captured_stdout",
"TransientResource", "transient_internet", "run_with_locale",
"set_memlimit", "bigmemtest", "bigaddrspacetest",
"BasicTestRunner", "run_unittest", "run_doctest",
"threading_setup", "threading_cleanup", "reap_children"]),
'commands': ('subprocess', ['getstatusoutput', 'getoutput']),
'UserString' : ('collections', ['UserString']),
'UserList' : ('collections', ['UserList']),
}
@ -180,7 +297,7 @@ def build_pattern():
yield """bare_name=%s""" % alternates(bare)
class FixImports(basefix.BaseFix):
class FixImports(fixer_base.BaseFix):
PATTERN = "|".join(build_pattern())
order = "pre" # Pre-order tree traversal

View file

@ -2,15 +2,15 @@
# Author: Andre Roberge
# Local imports
from . import basefix
from .util import Call, Name
from .. import fixer_base
from ..fixer_util import Call, Name
from .. import patcomp
context = patcomp.compile_pattern("power< 'eval' trailer< '(' any ')' > >")
class FixInput(basefix.BaseFix):
class FixInput(fixer_base.BaseFix):
PATTERN = """
power< 'input' args=trailer< '(' [any] ')' > >

View file

@ -7,11 +7,11 @@ intern(s) -> sys.intern(s)"""
# Local imports
from .. import pytree
from . import basefix
from .util import Name, Attr
from .. import fixer_base
from ..fixer_util import Name, Attr
class FixIntern(basefix.BaseFix):
class FixIntern(fixer_base.BaseFix):
PATTERN = """
power< 'intern'

View file

@ -8,10 +8,10 @@
"""
# Local imports
from . import basefix
from .util import Name
from .. import fixer_base
from ..fixer_util import Name
class FixItertools(basefix.BaseFix):
class FixItertools(fixer_base.BaseFix):
it_funcs = "('imap'|'ifilter'|'izip'|'ifilterfalse')"
PATTERN = """
power< it='itertools'

View file

@ -1,10 +1,10 @@
""" Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse) """
# Local imports
from . import basefix
from .util import BlankLine
from .. import fixer_base
from ..fixer_util import BlankLine
class FixItertoolsImports(basefix.BaseFix):
class FixItertoolsImports(fixer_base.BaseFix):
PATTERN = """
import_from< 'from' 'itertools' 'import' imports=any >
""" %(locals())

View file

@ -8,11 +8,11 @ This also strips the trailing 'L' or 'l' from long literals.
# Local imports
from .. import pytree
from . import basefix
from .util import Name, Number
from .. import fixer_base
from ..fixer_util import Name, Number
class FixLong(basefix.BaseFix):
class FixLong(fixer_base.BaseFix):
PATTERN = """
(long_type = 'long' | number = NUMBER)

View file

@ -21,11 +21,11 @@ soon as the shortest argument is exhausted.
# Local imports
from ..pgen2 import token
from . import basefix
from .util import Name, Call, ListComp, in_special_context
from .. import fixer_base
from ..fixer_util import Name, Call, ListComp, in_special_context
from ..pygram import python_symbols as syms
class FixMap(basefix.ConditionalFix):
class FixMap(fixer_base.ConditionalFix):
PATTERN = """
map_none=power<

View file

@ -3,8 +3,8 @@
# Author: Christian Heimes
# Local imports
from . import basefix
from .util import Name
from .. import fixer_base
from ..fixer_util import Name
MAP = {
"im_func" : "__func__",
@ -12,7 +12,7 @@ MAP = {
"im_class" : "__self__.__class__"
}
class FixMethodattrs(basefix.BaseFix):
class FixMethodattrs(fixer_base.BaseFix):
PATTERN = """
power< any+ trailer< '.' attr=('im_func' | 'im_self' | 'im_class') > any* >
"""

View file

@ -6,10 +6,10 @@
# Local imports
from .. import pytree
from ..pgen2 import token
from . import basefix
from .. import fixer_base
class FixNe(basefix.BaseFix):
class FixNe(fixer_base.BaseFix):
# This is so simple that we don't need the pattern compiler.
def match(self, node):

View file

@ -8,13 +8,13 @@
# Local imports
from ..pgen2 import token
from ..pygram import python_symbols as syms
from . import basefix
from .util import Name, Call, find_binding, any
from .. import fixer_base
from ..fixer_util import Name, Call, find_binding, any
bind_warning = "Calls to builtin next() possibly shadowed by global binding"
class FixNext(basefix.BaseFix):
class FixNext(fixer_base.BaseFix):
PATTERN = """
power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > >
|

View file

@ -2,10 +2,10 @@
# Author: Collin Winter
# Local imports
from .import basefix
from .util import Name, syms
from .. import fixer_base
from ..fixer_util import Name, syms
class FixNonzero(basefix.BaseFix):
class FixNonzero(fixer_base.BaseFix):
PATTERN = """
classdef< 'class' any+ ':'
suite< any*

View file

@ -5,11 +5,11 @@
# Local imports
from ..pgen2 import token
from .import basefix
from .util import Number, set
from .. import fixer_base
from ..fixer_util import Number, set
class FixNumliterals(basefix.BaseFix):
class FixNumliterals(fixer_base.BaseFix):
# This is so simple that we don't need the pattern compiler.
def match(self, node):

View file

@ -17,8 +17,8 @@ No changes are applied if print_function is imported from __future__
from .. import patcomp
from .. import pytree
from ..pgen2 import token
from .import basefix
from .util import Name, Call, Comma, String, is_tuple
from .. import fixer_base
from ..fixer_util import Name, Call, Comma, String, is_tuple
parend_expr = patcomp.compile_pattern(
@ -26,7 +26,7 @@ parend_expr = patcomp.compile_pattern(
)
class FixPrint(basefix.ConditionalFix):
class FixPrint(fixer_base.ConditionalFix):
PATTERN = """
simple_stmt< bare='print' any > | print_stmt

View file

@ -24,10 +24,10 @@ CAVEATS:
# Local imports
from .. import pytree
from ..pgen2 import token
from .import basefix
from .util import Name, Call, Attr, ArgList, is_tuple
from .. import fixer_base
from ..fixer_util import Name, Call, Attr, ArgList, is_tuple
class FixRaise(basefix.BaseFix):
class FixRaise(fixer_base.BaseFix):
PATTERN = """
raise_stmt< 'raise' exc=any [',' val=any [',' tb=any]] >

View file

@ -2,10 +2,10 @@
# Author: Andre Roberge
# Local imports
from .import basefix
from .util import Name
from .. import fixer_base
from ..fixer_util import Name
class FixRawInput(basefix.BaseFix):
class FixRawInput(fixer_base.BaseFix):
PATTERN = """
power< name='raw_input' trailer< '(' [any] ')' > >

View file

@ -7,8 +7,8 @@ Fixes:
# based on Collin Winter's fix_import
# Local imports
from .import basefix
from .util import Name, attr_chain, any, set
from .. import fixer_base
from ..fixer_util import Name, attr_chain, any, set
MAPPING = {"sys": {"maxint" : "maxsize"},
}
@ -39,7 +39,7 @@ def build_pattern():
#yield """bare_name=%s""" % alternates(bare)
class FixRenames(basefix.BaseFix):
class FixRenames(fixer_base.BaseFix):
PATTERN = "|".join(build_pattern())
order = "pre" # Pre-order tree traversal

View file

@ -4,11 +4,11 @@
"""Fixer that transforms `xyzzy` into repr(xyzzy)."""
# Local imports
from .import basefix
from .util import Call, Name
from .. import fixer_base
from ..fixer_util import Call, Name
class FixRepr(basefix.BaseFix):
class FixRepr(fixer_base.BaseFix):
PATTERN = """
atom < '`' expr=any '`' >

View file

@ -4,11 +4,11 @@
"""Fixer for StandardError -> Exception."""
# Local imports
from .import basefix
from .util import Name
from .. import fixer_base
from ..fixer_util import Name
class FixStandarderror(basefix.BaseFix):
class FixStandarderror(fixer_base.BaseFix):
PATTERN = """
'StandardError'

View file

@ -10,10 +10,10 @@ g.throw("foo"[, V[, T]]) will warn about string exceptions."""
# Local imports
from .. import pytree
from ..pgen2 import token
from .import basefix
from .util import Name, Call, ArgList, Attr, is_tuple
from .. import fixer_base
from ..fixer_util import Name, Call, ArgList, Attr, is_tuple
class FixThrow(basefix.BaseFix):
class FixThrow(fixer_base.BaseFix):
PATTERN = """
power< any trailer< '.' 'throw' >

View file

@ -21,14 +21,14 @@ It will also support lambdas:
# Local imports
from .. import pytree
from ..pgen2 import token
from .import basefix
from .util import Assign, Name, Newline, Number, Subscript, syms
from .. import fixer_base
from ..fixer_util import Assign, Name, Newline, Number, Subscript, syms
def is_docstring(stmt):
return isinstance(stmt, pytree.Node) and \
stmt.children[0].type == token.STRING
class FixTupleParams(basefix.BaseFix):
class FixTupleParams(fixer_base.BaseFix):
PATTERN = """
funcdef< 'def' any parameters< '(' args=any ')' >
['->' any] ':' suite=any+ >

View file

@ -21,8 +21,8 @@ There should be another fixer that handles at least the following constants:
# Local imports
from ..pgen2 import token
from .import basefix
from .util import Name
from .. import fixer_base
from ..fixer_util import Name
_TYPE_MAPPING = {
'BooleanType' : 'bool',
@ -51,7 +51,7 @@ _TYPE_MAPPING = {
_pats = ["power< 'types' trailer< '.' name='%s' > >" % t for t in _TYPE_MAPPING]
class FixTypes(basefix.BaseFix):
class FixTypes(fixer_base.BaseFix):
PATTERN = '|'.join(_pats)

View file

@ -4,9 +4,9 @@
import re
from ..pgen2 import token
from .import basefix
from .. import fixer_base
class FixUnicode(basefix.BaseFix):
class FixUnicode(fixer_base.BaseFix):
PATTERN = "STRING | NAME<'unicode' | 'unichr'>"

View file

@ -7,9 +7,9 @@ uses of colons. It does not touch other uses of whitespace.
from .. import pytree
from ..pgen2 import token
from .import basefix
from .. import fixer_base
class FixWsComma(basefix.BaseFix):
class FixWsComma(fixer_base.BaseFix):
explicit = True  # The user must ask for this fixer

View file

@ -4,12 +4,12 @@
"""Fixer that changes xrange(...) into range(...)."""
# Local imports
from .import basefix
from .util import Name, Call, consuming_calls
from .. import fixer_base
from ..fixer_util import Name, Call, consuming_calls
from .. import patcomp
class FixXrange(basefix.BaseFix):
class FixXrange(fixer_base.BaseFix):
PATTERN = """
power< (name='range'|name='xrange') trailer< '(' [any] ')' > any* >

View file

@ -4,11 +4,11 @@ This fixer will also convert g(f.xreadlines) into g(f.__iter__)."""
# Author: Collin Winter
# Local imports
from .import basefix
from .util import Name
from .. import fixer_base
from ..fixer_util import Name
class FixXreadlines(basefix.BaseFix):
class FixXreadlines(fixer_base.BaseFix):
PATTERN = """
power< call=any+ trailer< '.' 'xreadlines' > trailer< '(' ')' > >
|

View file

@ -8,10 +8,10 @@ iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or for V in <>:.
"""
# Local imports
from . import basefix
from .util import Name, Call, in_special_context
from .. import fixer_base
from ..fixer_util import Name, Call, in_special_context
class FixZip(basefix.ConditionalFix):
class FixZip(fixer_base.ConditionalFix):
PATTERN = """
power< 'zip' args=trailer< '(' [any] ')' >

View file

@ -1,366 +0,0 @@
"""Utility functions, node construction macros, etc."""
# Author: Collin Winter
# Local imports
from ..pgen2 import token
from ..pytree import Leaf, Node
from ..pygram import python_symbols as syms
from .. import patcomp
###########################################################
### Common node-construction "macros"
###########################################################
def KeywordArg(keyword, value):
return Node(syms.argument,
[keyword, Leaf(token.EQUAL, '='), value])
def LParen():
return Leaf(token.LPAR, "(")
def RParen():
return Leaf(token.RPAR, ")")
def Assign(target, source):
"""Build an assignment statement"""
if not isinstance(target, list):
target = [target]
if not isinstance(source, list):
source.set_prefix(" ")
source = [source]
return Node(syms.atom,
target + [Leaf(token.EQUAL, "=", prefix=" ")] + source)
def Name(name, prefix=None):
"""Return a NAME leaf"""
return Leaf(token.NAME, name, prefix=prefix)
def Attr(obj, attr):
"""A node tuple for obj.attr"""
return [obj, Node(syms.trailer, [Dot(), attr])]
def Comma():
"""A comma leaf"""
return Leaf(token.COMMA, ",")
def Dot():
"""A period (.) leaf"""
return Leaf(token.DOT, ".")
def ArgList(args, lparen=LParen(), rparen=RParen()):
"""A parenthesised argument list, used by Call()"""
return Node(syms.trailer,
[lparen.clone(),
Node(syms.arglist, args),
rparen.clone()])
def Call(func_name, args, prefix=None):
"""A function call"""
node = Node(syms.power, [func_name, ArgList(args)])
if prefix is not None:
node.set_prefix(prefix)
return node
def Newline():
"""A newline literal"""
return Leaf(token.NEWLINE, "\n")
def BlankLine():
"""A blank line"""
return Leaf(token.NEWLINE, "")
def Number(n, prefix=None):
return Leaf(token.NUMBER, n, prefix=prefix)
def Subscript(index_node):
"""A numeric or string subscript"""
return Node(syms.trailer, [Leaf(token.LBRACE, '['),
index_node,
Leaf(token.RBRACE, ']')])
def String(string, prefix=None):
"""A string leaf"""
return Leaf(token.STRING, string, prefix=prefix)
def ListComp(xp, fp, it, test=None):
"""A list comprehension of the form [xp for fp in it if test].
If test is None, the "if test" part is omitted.
"""
xp.set_prefix("")
fp.set_prefix(" ")
it.set_prefix(" ")
for_leaf = Leaf(token.NAME, "for")
for_leaf.set_prefix(" ")
in_leaf = Leaf(token.NAME, "in")
in_leaf.set_prefix(" ")
inner_args = [for_leaf, fp, in_leaf, it]
if test:
test.set_prefix(" ")
if_leaf = Leaf(token.NAME, "if")
if_leaf.set_prefix(" ")
inner_args.append(Node(syms.comp_if, [if_leaf, test]))
inner = Node(syms.listmaker, [xp, Node(syms.comp_for, inner_args)])
return Node(syms.atom,
[Leaf(token.LBRACE, "["),
inner,
Leaf(token.RBRACE, "]")])
def FromImport(package_name, name_leafs):
""" Return an import statement in the form:
from package import name_leafs"""
# XXX: May not handle dotted imports properly (eg, package_name='foo.bar')
assert package_name == '.' or '.' not in package.name, "FromImport has "\
"not been tested with dotted package names -- use at your own "\
"peril!"
for leaf in name_leafs:
# Pull the leaves out of their old tree
leaf.remove()
children = [Leaf(token.NAME, 'from'),
Leaf(token.NAME, package_name, prefix=" "),
Leaf(token.NAME, 'import', prefix=" "),
Node(syms.import_as_names, name_leafs)]
imp = Node(syms.import_from, children)
return imp

###########################################################
### Determine whether a node represents a given literal
###########################################################

def is_tuple(node):
    """Does the node represent a tuple literal?"""
    if isinstance(node, Node) and node.children == [LParen(), RParen()]:
        return True
    return (isinstance(node, Node)
            and len(node.children) == 3
            and isinstance(node.children[0], Leaf)
            and isinstance(node.children[1], Node)
            and isinstance(node.children[2], Leaf)
            and node.children[0].value == "("
            and node.children[2].value == ")")

def is_list(node):
    """Does the node represent a list literal?"""
    return (isinstance(node, Node)
            and len(node.children) > 1
            and isinstance(node.children[0], Leaf)
            and isinstance(node.children[-1], Leaf)
            and node.children[0].value == "["
            and node.children[-1].value == "]")


###########################################################
### Common portability code. This allows fixers to do, eg,
###   "from .util import set" and forget about it.
###########################################################

try:
    any = any
except NameError:
    def any(l):
        for o in l:
            if o:
                return True
        return False

try:
    set = set
except NameError:
    from sets import Set as set

try:
    reversed = reversed
except NameError:
    def reversed(l):
        return l[::-1]

###########################################################
### Misc
###########################################################

consuming_calls = set(["sorted", "list", "set", "any", "all", "tuple", "sum",
                       "min", "max"])

def attr_chain(obj, attr):
    """Follow an attribute chain.

    If you have a chain of objects where a.foo -> b, b.foo -> c, etc,
    use this to iterate over all objects in the chain. Iteration is
    terminated when getattr(x, attr) is None.

    Args:
        obj: the starting object
        attr: the name of the chaining attribute

    Yields:
        Each successive object in the chain.
    """
    next = getattr(obj, attr)
    while next:
        yield next
        next = getattr(next, attr)

p0 = """for_stmt< 'for' any 'in' node=any ':' any* >
        | comp_for< 'for' any 'in' node=any any* >
     """
p1 = """
power<
    ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'sum' |
      'any' | 'all' | (any* trailer< '.' 'join' >) )
    trailer< '(' node=any ')' >
    any*
>
"""
p2 = """
power<
    'sorted'
    trailer< '(' arglist<node=any any*> ')' >
    any*
>
"""
pats_built = False

def in_special_context(node):
    """ Returns true if node is in an environment where all that is required
        of it is being iterable (i.e., it doesn't matter if it returns a list
        or an iterator).

        See test_map_nochange in test_fixers.py for some examples and tests.
    """
    global p0, p1, p2, pats_built
    if not pats_built:
        p1 = patcomp.compile_pattern(p1)
        p0 = patcomp.compile_pattern(p0)
        p2 = patcomp.compile_pattern(p2)
        pats_built = True
    patterns = [p0, p1, p2]
    for pattern, parent in zip(patterns, attr_chain(node, "parent")):
        results = {}
        if pattern.match(parent, results) and results["node"] is node:
            return True
    return False

###########################################################
### The following functions are to find bindings in a suite
###########################################################

def make_suite(node):
    if node.type == syms.suite:
        return node
    node = node.clone()
    parent, node.parent = node.parent, None
    suite = Node(syms.suite, [node])
    suite.parent = parent
    return suite

def does_tree_import(package, name, node):
    """ Returns true if name is imported from package at the
        top level of the tree which node belongs to.
        To cover the case of an import like 'import foo', use
        None for the package and 'foo' for the name. """
    # Scamper up to the top level namespace
    while node.type != syms.file_input:
        assert node.parent, "Tree is insane! root found before "\
                            "file_input node was found."
        node = node.parent

    binding = find_binding(name, node, package)
    return bool(binding)

_def_syms = set([syms.classdef, syms.funcdef])

def find_binding(name, node, package=None):
    """ Returns the node which binds variable name, otherwise None.
        If optional argument package is supplied, only imports will
        be returned.
        See test cases for examples."""
    for child in node.children:
        ret = None
        if child.type == syms.for_stmt:
            if _find(name, child.children[1]):
                return child
            n = find_binding(name, make_suite(child.children[-1]), package)
            if n: ret = n
        elif child.type in (syms.if_stmt, syms.while_stmt):
            n = find_binding(name, make_suite(child.children[-1]), package)
            if n: ret = n
        elif child.type == syms.try_stmt:
            n = find_binding(name, make_suite(child.children[2]), package)
            if n:
                ret = n
            else:
                for i, kid in enumerate(child.children[3:]):
                    if kid.type == token.COLON and kid.value == ":":
                        # i+3 is the colon, i+4 is the suite
                        n = find_binding(name, make_suite(child.children[i+4]), package)
                        if n: ret = n
        elif child.type in _def_syms and child.children[1].value == name:
            ret = child
        elif _is_import_binding(child, name, package):
            ret = child
        elif child.type == syms.simple_stmt:
            ret = find_binding(name, child, package)
        elif child.type == syms.expr_stmt:
            if _find(name, child.children[0]):
                ret = child

        if ret:
            if not package:
                return ret
            if ret.type in (syms.import_name, syms.import_from):
                return ret
    return None

_block_syms = set([syms.funcdef, syms.classdef, syms.trailer])

def _find(name, node):
    nodes = [node]
    while nodes:
        node = nodes.pop()
        if node.type > 256 and node.type not in _block_syms:
            nodes.extend(node.children)
        elif node.type == token.NAME and node.value == name:
            return node
    return None
def _is_import_binding(node, name, package=None):
    """ Will return node if node will import name, or node
        will import * from package.  None is returned otherwise.
        See test cases for examples. """

    if node.type == syms.import_name and not package:
        imp = node.children[1]
        if imp.type == syms.dotted_as_names:
            for child in imp.children:
                if child.type == syms.dotted_as_name:
                    if child.children[2].value == name:
                        return node
                elif child.type == token.NAME and child.value == name:
                    return node
        elif imp.type == syms.dotted_as_name:
            last = imp.children[-1]
            if last.type == token.NAME and last.value == name:
                return node
        elif imp.type == token.NAME and imp.value == name:
            return node
    elif node.type == syms.import_from:
        # str(...) is used to make life easier here, because
        # 'from a.b import' parses to ['import', ['a', '.', 'b'], ...]
        if package and str(node.children[1]).strip() != package:
            return None
        n = node.children[3]
        if package and _find('as', n):
            # See test_from_import_as for explanation
            return None
        elif n.type == syms.import_as_names and _find(name, n):
            return node
        elif n.type == syms.import_as_name:
            child = n.children[2]
            if child.type == token.NAME and child.value == name:
                return node
        elif n.type == token.NAME and n.value == name:
            return node
        elif package and n.type == token.STAR:
            return node
    return None
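
The node-construction helpers in the file above (importable as lib2to3.fixer_util after this commit) are the vocabulary fixers use to build their replacement subtrees. A small standalone usage sketch:

    from lib2to3.fixer_util import Call, Comma, Name, String

    # Build the expression  hasattr(obj, '__call__')  as a lib2to3 subtree.
    args = [Name("obj"), Comma(), String("'__call__'", prefix=" ")]
    node = Call(Name("hasattr"), args)
    print(node)   # str() of a node renders it back to source text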