Issue #22823: Use set literals in lib2to3.

This commit is contained in:
Serhiy Storchaka 2014-12-13 21:50:49 +02:00
parent bd62f0a6e4
commit db9b65d9e5
4 changed files with 8 additions and 8 deletions

View file

@@ -187,8 +187,8 @@ def parenthesize(node):
     return Node(syms.atom, [LParen(), node, RParen()])
-consuming_calls = set(["sorted", "list", "set", "any", "all", "tuple", "sum",
-                       "min", "max", "enumerate"])
+consuming_calls = {"sorted", "list", "set", "any", "all", "tuple", "sum",
+                   "min", "max", "enumerate"}
 def attr_chain(obj, attr):
     """Follow an attribute chain.
@@ -359,7 +359,7 @@ def touch_import(package, name, node):
     root.insert_child(insert_pos, Node(syms.simple_stmt, children))
-_def_syms = set([syms.classdef, syms.funcdef])
+_def_syms = {syms.classdef, syms.funcdef}
 def find_binding(name, node, package=None):
     """ Returns the node which binds variable name, otherwise None.
     If optional argument package is supplied, only imports will
@@ -402,7 +402,7 @@ def find_binding(name, node, package=None):
         return ret
     return None
-_block_syms = set([syms.funcdef, syms.classdef, syms.trailer])
+_block_syms = {syms.funcdef, syms.classdef, syms.trailer}
 def _find(name, node):
     nodes = [node]
     while nodes:

View file

@@ -36,7 +36,7 @@ from ..fixer_util import Name, Call, LParen, RParen, ArgList, Dot
 from .. import fixer_util
-iter_exempt = fixer_util.consuming_calls | set(["iter"])
+iter_exempt = fixer_util.consuming_calls | {"iter"}
 class FixDict(fixer_base.BaseFix):

View file

@@ -32,7 +32,7 @@ class PatternSyntaxError(Exception):
 def tokenize_wrapper(input):
     """Tokenizes a string suppressing significant whitespace."""
-    skip = set((token.NEWLINE, token.INDENT, token.DEDENT))
+    skip = {token.NEWLINE, token.INDENT, token.DEDENT}
     tokens = tokenize.generate_tokens(io.StringIO(input).readline)
     for quintuple in tokens:
         type, value, start, end, line_text = quintuple

View file

@@ -57,7 +57,7 @@ def _get_head_types(pat):
         # Always return leafs
         if pat.type is None:
             raise _EveryNode
-        return set([pat.type])
+        return {pat.type}
     if isinstance(pat, pytree.NegatedPattern):
         if pat.content:
@@ -133,7 +133,7 @@ def _detect_future_features(source):
     def advance():
         tok = next(gen)
         return tok[0], tok[1]
-    ignore = frozenset((token.NEWLINE, tokenize.NL, token.COMMENT))
+    ignore = frozenset({token.NEWLINE, tokenize.NL, token.COMMENT})
     features = set()
    try:
         while True: