Killed the <> operator. You must now use !=.

Opportunistically also fixed one or two places where '<> None' should be
'is not None' and where 'type(x) <> y' should be 'not isinstance(x, y)'.
This commit is contained in:
Guido van Rossum 2006-08-24 03:53:23 +00:00
parent 01c77c6628
commit b053cd8f40
36 changed files with 171 additions and 178 deletions

View file

@ -90,7 +90,7 @@ or_test: and_test ('or' and_test)*
and_test: not_test ('and' not_test)* and_test: not_test ('and' not_test)*
not_test: 'not' not_test | comparison not_test: 'not' not_test | comparison
comparison: expr (comp_op expr)* comparison: expr (comp_op expr)*
comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' comp_op: '<'|'>'|'=='|'>='|'<='|'!='|'in'|'not' 'in'|'is'|'is' 'not'
expr: xor_expr ('|' xor_expr)* expr: xor_expr ('|' xor_expr)*
xor_expr: and_expr ('^' and_expr)* xor_expr: and_expr ('^' and_expr)*
and_expr: shift_expr ('&' shift_expr)* and_expr: shift_expr ('&' shift_expr)*

View file

@ -453,7 +453,7 @@ class bsdTableDB :
# error # error
dataitem = None dataitem = None
dataitem = mappings[column](dataitem) dataitem = mappings[column](dataitem)
if dataitem <> None: if dataitem != None:
self.db.put( self.db.put(
_data_key(table, column, rowid), _data_key(table, column, rowid),
dataitem, txn=txn) dataitem, txn=txn)

View file

@ -120,7 +120,7 @@ class CompatibilityTestCase(unittest.TestCase):
try: try:
rec = f.next() rec = f.next()
except KeyError: except KeyError:
assert rec == f.last(), 'Error, last <> last!' assert rec == f.last(), 'Error, last != last!'
f.previous() f.previous()
break break
if verbose: if verbose:

View file

@ -30,7 +30,7 @@ class SimpleRecnoTestCase(unittest.TestCase):
try: try:
os.remove(self.filename) os.remove(self.filename)
except OSError, e: except OSError, e:
if e.errno <> errno.EEXIST: raise if e.errno != errno.EEXIST: raise
def test01_basic(self): def test01_basic(self):
d = db.DB() d = db.DB()

View file

@ -58,7 +58,7 @@ class BaseThreadedTestCase(unittest.TestCase):
try: try:
os.mkdir(homeDir) os.mkdir(homeDir)
except OSError, e: except OSError, e:
if e.errno <> errno.EEXIST: raise if e.errno != errno.EEXIST: raise
self.env = db.DBEnv() self.env = db.DBEnv()
self.setEnvOpts() self.setEnvOpts()
self.env.open(homeDir, self.envflags | db.DB_CREATE) self.env.open(homeDir, self.envflags | db.DB_CREATE)

View file

@ -618,7 +618,7 @@ class Transformer:
for i in range(2, len(nodelist), 2): for i in range(2, len(nodelist), 2):
nl = nodelist[i-1] nl = nodelist[i-1]
# comp_op: '<' | '>' | '=' | '>=' | '<=' | '<>' | '!=' | '==' # comp_op: '<' | '>' | '=' | '>=' | '<=' | '!=' | '=='
# | 'in' | 'not' 'in' | 'is' | 'is' 'not' # | 'in' | 'not' 'in' | 'is' | 'is' 'not'
n = nl[1] n = nl[1]
if n[0] == token.NAME: if n[0] == token.NAME:
@ -1396,7 +1396,7 @@ _doc_nodes = [
symbol.power, symbol.power,
] ]
# comp_op: '<' | '>' | '=' | '>=' | '<=' | '<>' | '!=' | '==' # comp_op: '<' | '>' | '=' | '>=' | '<=' | '!=' | '=='
# | 'in' | 'not' 'in' | 'is' | 'is' 'not' # | 'in' | 'not' 'in' | 'is' | 'is' 'not'
_cmp_types = { _cmp_types = {
token.LESS : '<', token.LESS : '<',

View file

@ -146,7 +146,7 @@ def encode(s, binary=True, maxlinelen=76, eol=NL):
# BAW: should encode() inherit b2a_base64()'s dubious behavior in # BAW: should encode() inherit b2a_base64()'s dubious behavior in
# adding a newline to the encoded string? # adding a newline to the encoded string?
enc = b2a_base64(s[i:i + max_unencoded]) enc = b2a_base64(s[i:i + max_unencoded])
if enc.endswith(NL) and eol <> NL: if enc.endswith(NL) and eol != NL:
enc = enc[:-1] + eol enc = enc[:-1] + eol
encvec.append(enc) encvec.append(enc)
return EMPTYSTRING.join(encvec) return EMPTYSTRING.join(encvec)

View file

@ -250,7 +250,7 @@ class Charset:
Returns "base64" if self.body_encoding is BASE64. Returns "base64" if self.body_encoding is BASE64.
Returns "7bit" otherwise. Returns "7bit" otherwise.
""" """
assert self.body_encoding <> SHORTEST assert self.body_encoding != SHORTEST
if self.body_encoding == QP: if self.body_encoding == QP:
return 'quoted-printable' return 'quoted-printable'
elif self.body_encoding == BASE64: elif self.body_encoding == BASE64:
@ -260,7 +260,7 @@ class Charset:
def convert(self, s): def convert(self, s):
"""Convert a string from the input_codec to the output_codec.""" """Convert a string from the input_codec to the output_codec."""
if self.input_codec <> self.output_codec: if self.input_codec != self.output_codec:
return unicode(s, self.input_codec).encode(self.output_codec) return unicode(s, self.input_codec).encode(self.output_codec)
else: else:
return s return s

View file

@ -211,7 +211,7 @@ class Generator:
# doesn't preserve newlines/continuations in headers. This is no big # doesn't preserve newlines/continuations in headers. This is no big
# deal in practice, but turns out to be inconvenient for the unittest # deal in practice, but turns out to be inconvenient for the unittest
# suite. # suite.
if msg.get_boundary() <> boundary: if msg.get_boundary() != boundary:
msg.set_boundary(boundary) msg.set_boundary(boundary)
# If there's a preamble, write it out, with a trailing CRLF # If there's a preamble, write it out, with a trailing CRLF
if msg.preamble is not None: if msg.preamble is not None:

View file

@ -248,7 +248,7 @@ class Header:
elif not isinstance(charset, Charset): elif not isinstance(charset, Charset):
charset = Charset(charset) charset = Charset(charset)
# If the charset is our faux 8bit charset, leave the string unchanged # If the charset is our faux 8bit charset, leave the string unchanged
if charset <> '8bit': if charset != '8bit':
# We need to test that the string can be converted to unicode and # We need to test that the string can be converted to unicode and
# back to a byte string, given the input and output codecs of the # back to a byte string, given the input and output codecs of the
# charset. # charset.
@ -454,7 +454,7 @@ def _split_ascii(s, firstlen, restlen, continuation_ws, splitchars):
# If this part is longer than maxlen and we aren't already # If this part is longer than maxlen and we aren't already
# splitting on whitespace, try to recursively split this line # splitting on whitespace, try to recursively split this line
# on whitespace. # on whitespace.
if partlen > maxlen and ch <> ' ': if partlen > maxlen and ch != ' ':
subl = _split_ascii(part, maxlen, restlen, subl = _split_ascii(part, maxlen, restlen,
continuation_ws, ' ') continuation_ws, ' ')
lines.extend(subl[:-1]) lines.extend(subl[:-1])

View file

@ -252,7 +252,7 @@ class Message:
charset=charset.get_output_charset()) charset=charset.get_output_charset())
else: else:
self.set_param('charset', charset.get_output_charset()) self.set_param('charset', charset.get_output_charset())
if str(charset) <> charset.get_output_charset(): if str(charset) != charset.get_output_charset():
self._payload = charset.body_encode(self._payload) self._payload = charset.body_encode(self._payload)
if 'Content-Transfer-Encoding' not in self: if 'Content-Transfer-Encoding' not in self:
cte = charset.get_body_encoding() cte = charset.get_body_encoding()
@ -301,7 +301,7 @@ class Message:
name = name.lower() name = name.lower()
newheaders = [] newheaders = []
for k, v in self._headers: for k, v in self._headers:
if k.lower() <> name: if k.lower() != name:
newheaders.append((k, v)) newheaders.append((k, v))
self._headers = newheaders self._headers = newheaders
@ -438,7 +438,7 @@ class Message:
return self.get_default_type() return self.get_default_type()
ctype = paramre.split(value)[0].lower().strip() ctype = paramre.split(value)[0].lower().strip()
# RFC 2045, section 5.2 says if its invalid, use text/plain # RFC 2045, section 5.2 says if its invalid, use text/plain
if ctype.count('/') <> 1: if ctype.count('/') != 1:
return 'text/plain' return 'text/plain'
return ctype return ctype
@ -601,7 +601,7 @@ class Message:
ctype = append_param ctype = append_param
else: else:
ctype = SEMISPACE.join([ctype, append_param]) ctype = SEMISPACE.join([ctype, append_param])
if ctype <> self.get(header): if ctype != self.get(header):
del self[header] del self[header]
self[header] = ctype self[header] = ctype
@ -617,13 +617,13 @@ class Message:
return return
new_ctype = '' new_ctype = ''
for p, v in self.get_params(header=header, unquote=requote): for p, v in self.get_params(header=header, unquote=requote):
if p.lower() <> param.lower(): if p.lower() != param.lower():
if not new_ctype: if not new_ctype:
new_ctype = _formatparam(p, v, requote) new_ctype = _formatparam(p, v, requote)
else: else:
new_ctype = SEMISPACE.join([new_ctype, new_ctype = SEMISPACE.join([new_ctype,
_formatparam(p, v, requote)]) _formatparam(p, v, requote)])
if new_ctype <> self.get(header): if new_ctype != self.get(header):
del self[header] del self[header]
self[header] = new_ctype self[header] = new_ctype

View file

@ -287,7 +287,7 @@ def decode(encoded, eol=NL):
n = len(line) n = len(line)
while i < n: while i < n:
c = line[i] c = line[i]
if c <> '=': if c != '=':
decoded += c decoded += c
i += 1 i += 1
# Otherwise, c == "=". Are we at the end of the line? If so, add # Otherwise, c == "=". Are we at the end of the line? If so, add

View file

@ -51,7 +51,7 @@ def openfile(filename, mode='r'):
class TestEmailBase(unittest.TestCase): class TestEmailBase(unittest.TestCase):
def ndiffAssertEqual(self, first, second): def ndiffAssertEqual(self, first, second):
"""Like failUnlessEqual except use ndiff for readable output.""" """Like failUnlessEqual except use ndiff for readable output."""
if first <> second: if first != second:
sfirst = str(first) sfirst = str(first)
ssecond = str(second) ssecond = str(second)
diff = difflib.ndiff(sfirst.splitlines(), ssecond.splitlines()) diff = difflib.ndiff(sfirst.splitlines(), ssecond.splitlines())
@ -2726,7 +2726,7 @@ class TestCharset(unittest.TestCase):
# Try a charset with None body encoding # Try a charset with None body encoding
c = Charset('us-ascii') c = Charset('us-ascii')
eq('hello world', c.body_encode('hello world')) eq('hello world', c.body_encode('hello world'))
# Try the convert argument, where input codec <> output codec # Try the convert argument, where input codec != output codec
c = Charset('euc-jp') c = Charset('euc-jp')
# With apologies to Tokio Kikuchi ;) # With apologies to Tokio Kikuchi ;)
try: try:

View file

@ -52,7 +52,7 @@ def openfile(filename, mode='r'):
class TestEmailBase(unittest.TestCase): class TestEmailBase(unittest.TestCase):
def ndiffAssertEqual(self, first, second): def ndiffAssertEqual(self, first, second):
"""Like failUnlessEqual except use ndiff for readable output.""" """Like failUnlessEqual except use ndiff for readable output."""
if first <> second: if first != second:
sfirst = str(first) sfirst = str(first)
ssecond = str(second) ssecond = str(second)
diff = difflib.ndiff(sfirst.splitlines(), ssecond.splitlines()) diff = difflib.ndiff(sfirst.splitlines(), ssecond.splitlines())
@ -2732,7 +2732,7 @@ class TestCharset(unittest.TestCase):
# Try a charset with None body encoding # Try a charset with None body encoding
c = Charset('us-ascii') c = Charset('us-ascii')
eq('hello world', c.body_encode('hello world')) eq('hello world', c.body_encode('hello world'))
# Try the convert argument, where input codec <> output codec # Try the convert argument, where input codec != output codec
c = Charset('euc-jp') c = Charset('euc-jp')
# With apologies to Tokio Kikuchi ;) # With apologies to Tokio Kikuchi ;)
try: try:

View file

@ -912,7 +912,8 @@ class Manager:
""" """
#for c in ph.loggers: #for c in ph.loggers:
for c in ph.loggerMap.keys(): for c in ph.loggerMap.keys():
if string.find(c.parent.name, alogger.name) <> 0: # XXX Is the following correct? Shouldn't it be >= 0?
if string.find(c.parent.name, alogger.name) != 0:
alogger.parent = c.parent alogger.parent = c.parent
c.parent = alogger c.parent = alogger

View file

@ -602,7 +602,7 @@ class Menu:
def dispatch(self, id, item, window, event): def dispatch(self, id, item, window, event):
title, shortcut, callback, mtype = self.items[item-1] title, shortcut, callback, mtype = self.items[item-1]
if callback: if callback:
if not self.bar.parent or type(callback) <> types.StringType: if not self.bar.parent or not isinstance(callback, str):
menuhandler = callback menuhandler = callback
else: else:
# callback is string # callback is string
@ -748,7 +748,7 @@ class Window:
self.parent = parent self.parent = parent
def open(self, bounds=(40, 40, 400, 400), resid=None): def open(self, bounds=(40, 40, 400, 400), resid=None):
if resid <> None: if resid is not None:
self.wid = GetNewWindow(resid, -1) self.wid = GetNewWindow(resid, -1)
else: else:
self.wid = NewWindow(bounds, self.__class__.__name__, 1, self.wid = NewWindow(bounds, self.__class__.__name__, 1,
@ -826,7 +826,7 @@ class Window:
# If we're not frontmost, select ourselves and wait for # If we're not frontmost, select ourselves and wait for
# the activate event. # the activate event.
# #
if MyFrontWindow() <> window: if MyFrontWindow() != window:
window.SelectWindow() window.SelectWindow()
return return
# We are. Handle the event. # We are. Handle the event.
@ -875,7 +875,7 @@ class ControlsWindow(Window):
if DEBUG: print "control hit in", window, "on", control, "; pcode =", pcode if DEBUG: print "control hit in", window, "on", control, "; pcode =", pcode
def do_inContent(self, partcode, window, event): def do_inContent(self, partcode, window, event):
if MyFrontWindow() <> window: if MyFrontWindow() != window:
window.SelectWindow() window.SelectWindow()
return return
(what, message, when, where, modifiers) = event (what, message, when, where, modifiers) = event

View file

@ -192,7 +192,7 @@ def process_common(template, progress, code, rsrcname, destname, is_update,
'icl8', 'ics4', 'ics8', 'ICN#', 'ics#'] 'icl8', 'ics4', 'ics8', 'ICN#', 'ics#']
if not copy_codefragment: if not copy_codefragment:
skiptypes.append('cfrg') skiptypes.append('cfrg')
## skipowner = (ownertype <> None) ## skipowner = (ownertype != None)
# Copy the resources from the template # Copy the resources from the template

View file

@ -73,7 +73,7 @@ class CfrgResource:
Res.CloseResFile(resref) Res.CloseResFile(resref)
Res.UseResFile(currentresref) Res.UseResFile(currentresref)
self.parse(data) self.parse(data)
if self.version <> 1: if self.version != 1:
raise error, "unknown 'cfrg' resource format" raise error, "unknown 'cfrg' resource format"
def parse(self, data): def parse(self, data):
@ -143,7 +143,7 @@ class FragmentDescriptor:
return data return data
def getfragment(self): def getfragment(self):
if self.where <> 1: if self.where != 1:
raise error, "can't read fragment, unsupported location" raise error, "can't read fragment, unsupported location"
f = open(self.path, "rb") f = open(self.path, "rb")
f.seek(self.offset) f.seek(self.offset)
@ -155,7 +155,7 @@ class FragmentDescriptor:
return frag return frag
def copydata(self, outfile): def copydata(self, outfile):
if self.where <> 1: if self.where != 1:
raise error, "can't read fragment, unsupported location" raise error, "can't read fragment, unsupported location"
infile = open(self.path, "rb") infile = open(self.path, "rb")
if self.length == 0: if self.length == 0:

View file

@ -169,7 +169,7 @@ def processfile_fromresource(fullname, output=None, basepkgname=None,
aete = decode(data, verbose) aete = decode(data, verbose)
aetelist.append((aete, res.GetResInfo())) aetelist.append((aete, res.GetResInfo()))
finally: finally:
if rf <> cur: if rf != cur:
CloseResFile(rf) CloseResFile(rf)
UseResFile(cur) UseResFile(cur)
# switch back (needed for dialogs in Python) # switch back (needed for dialogs in Python)
@ -332,7 +332,7 @@ def getpstr(f, *args):
def getalign(f): def getalign(f):
if f.tell() & 1: if f.tell() & 1:
c = f.read(1) c = f.read(1)
##if c <> '\0': ##if c != '\0':
## print 'align:', repr(c) ## print 'align:', repr(c)
def getlist(f, description, getitem): def getlist(f, description, getitem):
@ -779,7 +779,7 @@ class SuiteCompiler:
if is_enum(a[2]): if is_enum(a[2]):
kname = a[1] kname = a[1]
ename = a[2][0] ename = a[2][0]
if ename <> '****': if ename != '****':
fp.write(" aetools.enumsubst(_arguments, %r, _Enum_%s)\n" % fp.write(" aetools.enumsubst(_arguments, %r, _Enum_%s)\n" %
(kname, identify(ename))) (kname, identify(ename)))
self.enumsneeded[ename] = 1 self.enumsneeded[ename] = 1
@ -810,7 +810,7 @@ class SuiteCompiler:
for a in arguments: for a in arguments:
if is_enum(a[2]): if is_enum(a[2]):
ename = a[2][0] ename = a[2][0]
if ename <> '****': if ename != '****':
self.enumsneeded[ename] = 1 self.enumsneeded[ename] = 1
# #

View file

@ -1574,7 +1574,7 @@ smFHBlkDispErr = -311 #Error occurred during _sDisposePtr (Dispose of FHea
smFHBlockRdErr = -310 #Error occurred during _sGetFHeader. smFHBlockRdErr = -310 #Error occurred during _sGetFHeader.
smBLFieldBad = -309 #ByteLanes field was bad. smBLFieldBad = -309 #ByteLanes field was bad.
smUnExBusErr = -308 #Unexpected BusError smUnExBusErr = -308 #Unexpected BusError
smResrvErr = -307 #Fatal reserved error. Resreved field <> 0. smResrvErr = -307 #Fatal reserved error. Resreved field != 0.
smNosInfoArray = -306 #No sInfoArray. Memory Mgr error. smNosInfoArray = -306 #No sInfoArray. Memory Mgr error.
smDisabledSlot = -305 #This slot is disabled (-305 use to be smLWTstBad) smDisabledSlot = -305 #This slot is disabled (-305 use to be smLWTstBad)
smNoDir = -304 #Directory offset is Nil smNoDir = -304 #Directory offset is Nil

View file

@ -55,12 +55,10 @@ __eq__: (1,)
__lt__: (1,) __lt__: (1,)
__gt__: (1,) __gt__: (1,)
__ne__: (1,) __ne__: (1,)
__ne__: (1,)
__eq__: (1,) __eq__: (1,)
__gt__: (1,) __gt__: (1,)
__lt__: (1,) __lt__: (1,)
__ne__: (1,) __ne__: (1,)
__ne__: (1,)
__del__: () __del__: ()
__getattr__: ('spam',) __getattr__: ('spam',)
__setattr__: ('eggs', 'spam, spam, spam and ham') __setattr__: ('eggs', 'spam, spam, spam and ham')

View file

@ -108,11 +108,11 @@ test_tokenize
37,0-37,1: NL '\n' 37,0-37,1: NL '\n'
38,0-38,20: COMMENT '# Ordinary integers\n' 38,0-38,20: COMMENT '# Ordinary integers\n'
39,0-39,4: NUMBER '0xff' 39,0-39,4: NUMBER '0xff'
39,5-39,7: OP '<>' 39,5-39,7: OP '!='
39,8-39,11: NUMBER '255' 39,8-39,11: NUMBER '255'
39,11-39,12: NEWLINE '\n' 39,11-39,12: NEWLINE '\n'
40,0-40,4: NUMBER '0377' 40,0-40,4: NUMBER '0377'
40,5-40,7: OP '<>' 40,5-40,7: OP '!='
40,8-40,11: NUMBER '255' 40,8-40,11: NUMBER '255'
40,11-40,12: NEWLINE '\n' 40,11-40,12: NEWLINE '\n'
41,0-41,10: NUMBER '2147483647' 41,0-41,10: NUMBER '2147483647'
@ -484,7 +484,7 @@ test_tokenize
149,2-149,3: OP ',' 149,2-149,3: OP ','
149,4-149,5: NAME 'y' 149,4-149,5: NAME 'y'
149,5-149,6: OP ')' 149,5-149,6: OP ')'
149,7-149,9: OP '<>' 149,7-149,9: OP '!='
149,10-149,11: OP '(' 149,10-149,11: OP '('
149,11-149,12: OP '{' 149,11-149,12: OP '{'
149,12-149,15: STRING "'a'" 149,12-149,15: STRING "'a'"
@ -513,7 +513,7 @@ test_tokenize
152,21-152,22: NUMBER '1' 152,21-152,22: NUMBER '1'
152,23-152,25: OP '<=' 152,23-152,25: OP '<='
152,26-152,27: NUMBER '1' 152,26-152,27: NUMBER '1'
152,28-152,30: OP '<>' 152,28-152,30: OP '!='
152,31-152,32: NUMBER '1' 152,31-152,32: NUMBER '1'
152,33-152,35: OP '!=' 152,33-152,35: OP '!='
152,36-152,37: NUMBER '1' 152,36-152,37: NUMBER '1'

View file

@ -8,7 +8,7 @@ from test.test_support import requires, verbose, run_suite, unlink
# When running as a script instead of within the regrtest framework, skip the # When running as a script instead of within the regrtest framework, skip the
# requires test, since it's obvious we want to run them. # requires test, since it's obvious we want to run them.
if __name__ <> '__main__': if __name__ != '__main__':
requires('bsddb') requires('bsddb')
verbose = False verbose = False

View file

@ -244,12 +244,10 @@ str(testme)
testme == 1 testme == 1
testme < 1 testme < 1
testme > 1 testme > 1
testme <> 1
testme != 1 testme != 1
1 == testme 1 == testme
1 < testme 1 < testme
1 > testme 1 > testme
1 <> testme
1 != testme 1 != testme
# This test has to be last (duh.) # This test has to be last (duh.)

View file

@ -19,16 +19,16 @@ try:
except AttributeError: pass except AttributeError: pass
else: raise TestFailed, 'expected AttributeError' else: raise TestFailed, 'expected AttributeError'
if b.__dict__ <> {}: if b.__dict__ != {}:
raise TestFailed, 'expected unassigned func.__dict__ to be {}' raise TestFailed, 'expected unassigned func.__dict__ to be {}'
b.publish = 1 b.publish = 1
if b.publish <> 1: if b.publish != 1:
raise TestFailed, 'function attribute not set to expected value' raise TestFailed, 'function attribute not set to expected value'
docstring = 'its docstring' docstring = 'its docstring'
b.__doc__ = docstring b.__doc__ = docstring
if b.__doc__ <> docstring: if b.__doc__ != docstring:
raise TestFailed, 'problem with setting __doc__ attribute' raise TestFailed, 'problem with setting __doc__ attribute'
if 'publish' not in dir(b): if 'publish' not in dir(b):
@ -49,7 +49,7 @@ d = {'hello': 'world'}
b.__dict__ = d b.__dict__ = d
if b.func_dict is not d: if b.func_dict is not d:
raise TestFailed, 'func.__dict__ assignment to dictionary failed' raise TestFailed, 'func.__dict__ assignment to dictionary failed'
if b.hello <> 'world': if b.hello != 'world':
raise TestFailed, 'attribute after func.__dict__ assignment failed' raise TestFailed, 'attribute after func.__dict__ assignment failed'
f1 = F() f1 = F()
@ -75,13 +75,13 @@ else: raise TestFailed, 'expected AttributeError or TypeError'
# But setting it explicitly on the underlying function object is okay. # But setting it explicitly on the underlying function object is okay.
F.a.im_func.publish = 1 F.a.im_func.publish = 1
if F.a.publish <> 1: if F.a.publish != 1:
raise TestFailed, 'unbound method attribute not set to expected value' raise TestFailed, 'unbound method attribute not set to expected value'
if f1.a.publish <> 1: if f1.a.publish != 1:
raise TestFailed, 'bound method attribute access did not work' raise TestFailed, 'bound method attribute access did not work'
if f2.a.publish <> 1: if f2.a.publish != 1:
raise TestFailed, 'bound method attribute access did not work' raise TestFailed, 'bound method attribute access did not work'
if 'publish' not in dir(F.a): if 'publish' not in dir(F.a):
@ -117,7 +117,7 @@ else: raise TestFailed, 'expected TypeError or AttributeError'
F.a.im_func.__dict__ = {'one': 11, 'two': 22, 'three': 33} F.a.im_func.__dict__ = {'one': 11, 'two': 22, 'three': 33}
if f1.a.two <> 22: if f1.a.two != 22:
raise TestFailed, 'setting __dict__' raise TestFailed, 'setting __dict__'
from UserDict import UserDict from UserDict import UserDict
@ -128,7 +128,7 @@ try:
except (AttributeError, TypeError): pass except (AttributeError, TypeError): pass
else: raise TestFailed else: raise TestFailed
if f2.a.one <> f1.a.one <> F.a.one <> 11: if f2.a.one != f1.a.one != F.a.one != 11:
raise TestFailed raise TestFailed
# im_func may not be a Python method! # im_func may not be a Python method!
@ -136,7 +136,7 @@ import new
F.id = new.instancemethod(id, None, F) F.id = new.instancemethod(id, None, F)
eff = F() eff = F()
if eff.id() <> id(eff): if eff.id() != id(eff):
raise TestFailed raise TestFailed
try: try:

View file

@ -412,7 +412,7 @@ def test_break_continue_loop(extra_burning_oil = 1, count=0):
continue continue
except: except:
raise raise
if count > 2 or big_hippo <> 1: if count > 2 or big_hippo != 1:
print "continue then break in try/except in loop broken!" print "continue then break in try/except in loop broken!"
test_break_continue_loop() test_break_continue_loop()
@ -586,12 +586,11 @@ if 1 and 1 or 1 and 1 and 1 or not 1 and 1: pass
print 'comparison' print 'comparison'
### comparison: expr (comp_op expr)* ### comparison: expr (comp_op expr)*
### comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' ### comp_op: '<'|'>'|'=='|'>='|'<='|'!='|'in'|'not' 'in'|'is'|'is' 'not'
if 1: pass if 1: pass
x = (1 == 1) x = (1 == 1)
if 1 == 1: pass if 1 == 1: pass
if 1 != 1: pass if 1 != 1: pass
if 1 <> 1: pass
if 1 < 1: pass if 1 < 1: pass
if 1 > 1: pass if 1 > 1: pass
if 1 <= 1: pass if 1 <= 1: pass
@ -600,7 +599,7 @@ if 1 is 1: pass
if 1 is not 1: pass if 1 is not 1: pass
if 1 in (): pass if 1 in (): pass
if 1 not in (): pass if 1 not in (): pass
if 1 < 1 > 1 == 1 >= 1 <= 1 <> 1 != 1 in 1 not in 1 is 1 is not 1: pass if 1 < 1 > 1 == 1 >= 1 <= 1 != 1 in 1 not in 1 is 1 is not 1: pass
print 'binary mask ops' print 'binary mask ops'
x = 1 & 1 x = 1 & 1

View file

@ -285,7 +285,7 @@ class GeneralModuleTests(unittest.TestCase):
orig = sys.getrefcount(__name__) orig = sys.getrefcount(__name__)
socket.getnameinfo(__name__,0) socket.getnameinfo(__name__,0)
except SystemError: except SystemError:
if sys.getrefcount(__name__) <> orig: if sys.getrefcount(__name__) != orig:
self.fail("socket.getnameinfo loses a reference") self.fail("socket.getnameinfo loses a reference")
def testInterpreterCrash(self): def testInterpreterCrash(self):

View file

@ -515,7 +515,7 @@ class HandlerTests(TestCase):
"Content-Length: %d\r\n" "Content-Length: %d\r\n"
"\r\n%s" % (h.error_status,len(h.error_body),h.error_body)) "\r\n%s" % (h.error_status,len(h.error_body),h.error_body))
self.failUnless(h.stderr.getvalue().find("AssertionError")<>-1) self.failUnless("AssertionError" in h.stderr.getvalue())
def testErrorAfterOutput(self): def testErrorAfterOutput(self):
MSG = "Some output has been sent" MSG = "Some output has been sent"
@ -528,7 +528,7 @@ class HandlerTests(TestCase):
self.assertEqual(h.stdout.getvalue(), self.assertEqual(h.stdout.getvalue(),
"Status: 200 OK\r\n" "Status: 200 OK\r\n"
"\r\n"+MSG) "\r\n"+MSG)
self.failUnless(h.stderr.getvalue().find("AssertionError")<>-1) self.failUnless("AssertionError" in h.stderr.getvalue())
def testHeaderFormats(self): def testHeaderFormats(self):

View file

@ -36,8 +36,8 @@ x = 1 \
x = 0 x = 0
# Ordinary integers # Ordinary integers
0xff <> 255 0xff != 255
0377 <> 255 0377 != 255
2147483647 != 017777777777 2147483647 != 017777777777
-2147483647-1 != 020000000000 -2147483647-1 != 020000000000
037777777777 != -1 037777777777 != -1
@ -146,10 +146,10 @@ if 0:
def d22(a, b, c=1, d=2): pass def d22(a, b, c=1, d=2): pass
def d01v(a=1, *restt, **restd): pass def d01v(a=1, *restt, **restd): pass
(x, y) <> ({'a':1}, {'b':2}) (x, y) != ({'a':1}, {'b':2})
# comparison # comparison
if 1 < 1 > 1 == 1 >= 1 <= 1 <> 1 != 1 in 1 not in 1 is 1 is not 1: pass if 1 < 1 > 1 == 1 >= 1 <= 1 != 1 != 1 in 1 not in 1 is 1 is not 1: pass
# binary # binary
x = 1 & 1 x = 1 & 1

View file

@ -77,7 +77,7 @@ String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
# Because of leftmost-then-longest match semantics, be sure to put the # Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get # longest operators first (e.g., if = came before ==, == would get
# recognized as two instances of =). # recognized as two instances of =).
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=", Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"!=",
r"//=?", r"//=?",
r"[+\-*/%&|^=<>]=?", r"[+\-*/%&|^=<>]=?",
r"~") r"~")

View file

@ -627,7 +627,7 @@ def main():
for o, a in opts: for o, a in opts:
if o == '-n': new_win = 1 if o == '-n': new_win = 1
elif o == '-t': new_win = 2 elif o == '-t': new_win = 2
if len(args) <> 1: if len(args) != 1:
print >>sys.stderr, usage print >>sys.stderr, usage
sys.exit(1) sys.exit(1)

View file

@ -63,7 +63,7 @@ class Headers:
Does *not* raise an exception if the header is missing. Does *not* raise an exception if the header is missing.
""" """
name = name.lower() name = name.lower()
self._headers[:] = [kv for kv in self._headers if kv[0].lower()<>name] self._headers[:] = [kv for kv in self._headers if kv[0].lower() != name]
def __getitem__(self,name): def __getitem__(self,name):
"""Get the first header value for 'name' """Get the first header value for 'name'

View file

@ -98,7 +98,7 @@ def shift_path_info(environ):
return None return None
path_parts = path_info.split('/') path_parts = path_info.split('/')
path_parts[1:-1] = [p for p in path_parts[1:-1] if p and p<>'.'] path_parts[1:-1] = [p for p in path_parts[1:-1] if p and p != '.']
name = path_parts[1] name = path_parts[1]
del path_parts[1] del path_parts[1]

View file

@ -982,7 +982,6 @@ PyToken_TwoChars(int c1, int c2)
break; break;
case '<': case '<':
switch (c2) { switch (c2) {
case '>': return NOTEQUAL;
case '=': return LESSEQUAL; case '=': return LESSEQUAL;
case '<': return LEFTSHIFT; case '<': return LEFTSHIFT;
} }

View file

@ -478,7 +478,7 @@ ast_for_augassign(const node *n)
static cmpop_ty static cmpop_ty
ast_for_comp_op(const node *n) ast_for_comp_op(const node *n)
{ {
/* comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is' /* comp_op: '<'|'>'|'=='|'>='|'<='|'!='|'in'|'not' 'in'|'is'
|'is' 'not' |'is' 'not'
*/ */
REQ(n, comp_op); REQ(n, comp_op);

View file

@ -1107,17 +1107,16 @@ static state states_51[2] = {
{1, arcs_51_0}, {1, arcs_51_0},
{2, arcs_51_1}, {2, arcs_51_1},
}; };
static arc arcs_52_0[10] = { static arc arcs_52_0[9] = {
{117, 1}, {117, 1},
{118, 1}, {118, 1},
{119, 1}, {119, 1},
{120, 1}, {120, 1},
{121, 1}, {121, 1},
{122, 1}, {122, 1},
{123, 1},
{83, 1}, {83, 1},
{114, 2}, {114, 2},
{124, 3}, {123, 3},
}; };
static arc arcs_52_1[1] = { static arc arcs_52_1[1] = {
{0, 1}, {0, 1},
@ -1130,16 +1129,16 @@ static arc arcs_52_3[2] = {
{0, 3}, {0, 3},
}; };
static state states_52[4] = { static state states_52[4] = {
{10, arcs_52_0}, {9, arcs_52_0},
{1, arcs_52_1}, {1, arcs_52_1},
{1, arcs_52_2}, {1, arcs_52_2},
{2, arcs_52_3}, {2, arcs_52_3},
}; };
static arc arcs_53_0[1] = { static arc arcs_53_0[1] = {
{125, 1}, {124, 1},
}; };
static arc arcs_53_1[2] = { static arc arcs_53_1[2] = {
{126, 0}, {125, 0},
{0, 1}, {0, 1},
}; };
static state states_53[2] = { static state states_53[2] = {
@ -1147,10 +1146,10 @@ static state states_53[2] = {
{2, arcs_53_1}, {2, arcs_53_1},
}; };
static arc arcs_54_0[1] = { static arc arcs_54_0[1] = {
{127, 1}, {126, 1},
}; };
static arc arcs_54_1[2] = { static arc arcs_54_1[2] = {
{128, 0}, {127, 0},
{0, 1}, {0, 1},
}; };
static state states_54[2] = { static state states_54[2] = {
@ -1158,10 +1157,10 @@ static state states_54[2] = {
{2, arcs_54_1}, {2, arcs_54_1},
}; };
static arc arcs_55_0[1] = { static arc arcs_55_0[1] = {
{129, 1}, {128, 1},
}; };
static arc arcs_55_1[2] = { static arc arcs_55_1[2] = {
{130, 0}, {129, 0},
{0, 1}, {0, 1},
}; };
static state states_55[2] = { static state states_55[2] = {
@ -1169,10 +1168,10 @@ static state states_55[2] = {
{2, arcs_55_1}, {2, arcs_55_1},
}; };
static arc arcs_56_0[1] = { static arc arcs_56_0[1] = {
{131, 1}, {130, 1},
}; };
static arc arcs_56_1[3] = { static arc arcs_56_1[3] = {
{132, 0}, {131, 0},
{57, 0}, {57, 0},
{0, 1}, {0, 1},
}; };
@ -1181,11 +1180,11 @@ static state states_56[2] = {
{3, arcs_56_1}, {3, arcs_56_1},
}; };
static arc arcs_57_0[1] = { static arc arcs_57_0[1] = {
{133, 1}, {132, 1},
}; };
static arc arcs_57_1[3] = { static arc arcs_57_1[3] = {
{133, 0},
{134, 0}, {134, 0},
{135, 0},
{0, 1}, {0, 1},
}; };
static state states_57[2] = { static state states_57[2] = {
@ -1193,13 +1192,13 @@ static state states_57[2] = {
{3, arcs_57_1}, {3, arcs_57_1},
}; };
static arc arcs_58_0[1] = { static arc arcs_58_0[1] = {
{136, 1}, {135, 1},
}; };
static arc arcs_58_1[5] = { static arc arcs_58_1[5] = {
{28, 0}, {28, 0},
{136, 0},
{137, 0}, {137, 0},
{138, 0}, {138, 0},
{139, 0},
{0, 1}, {0, 1},
}; };
static state states_58[2] = { static state states_58[2] = {
@ -1207,13 +1206,13 @@ static state states_58[2] = {
{5, arcs_58_1}, {5, arcs_58_1},
}; };
static arc arcs_59_0[4] = { static arc arcs_59_0[4] = {
{133, 1},
{134, 1}, {134, 1},
{135, 1}, {139, 1},
{140, 1}, {140, 2},
{141, 2},
}; };
static arc arcs_59_1[1] = { static arc arcs_59_1[1] = {
{136, 2}, {135, 2},
}; };
static arc arcs_59_2[1] = { static arc arcs_59_2[1] = {
{0, 2}, {0, 2},
@ -1224,15 +1223,15 @@ static state states_59[3] = {
{1, arcs_59_2}, {1, arcs_59_2},
}; };
static arc arcs_60_0[1] = { static arc arcs_60_0[1] = {
{142, 1}, {141, 1},
}; };
static arc arcs_60_1[3] = { static arc arcs_60_1[3] = {
{143, 1}, {142, 1},
{29, 2}, {29, 2},
{0, 1}, {0, 1},
}; };
static arc arcs_60_2[1] = { static arc arcs_60_2[1] = {
{136, 3}, {135, 3},
}; };
static arc arcs_60_3[1] = { static arc arcs_60_3[1] = {
{0, 3}, {0, 3},
@ -1245,47 +1244,47 @@ static state states_60[4] = {
}; };
static arc arcs_61_0[7] = { static arc arcs_61_0[7] = {
{13, 1}, {13, 1},
{145, 2}, {144, 2},
{148, 3}, {147, 3},
{151, 4}, {150, 4},
{19, 5}, {19, 5},
{153, 5}, {152, 5},
{154, 6}, {153, 6},
}; };
static arc arcs_61_1[3] = { static arc arcs_61_1[3] = {
{43, 7}, {43, 7},
{144, 7}, {143, 7},
{15, 5}, {15, 5},
}; };
static arc arcs_61_2[2] = { static arc arcs_61_2[2] = {
{146, 8}, {145, 8},
{147, 5}, {146, 5},
}; };
static arc arcs_61_3[2] = { static arc arcs_61_3[2] = {
{149, 9}, {148, 9},
{150, 5}, {149, 5},
}; };
static arc arcs_61_4[1] = { static arc arcs_61_4[1] = {
{152, 10}, {151, 10},
}; };
static arc arcs_61_5[1] = { static arc arcs_61_5[1] = {
{0, 5}, {0, 5},
}; };
static arc arcs_61_6[2] = { static arc arcs_61_6[2] = {
{154, 6}, {153, 6},
{0, 6}, {0, 6},
}; };
static arc arcs_61_7[1] = { static arc arcs_61_7[1] = {
{15, 5}, {15, 5},
}; };
static arc arcs_61_8[1] = { static arc arcs_61_8[1] = {
{147, 5}, {146, 5},
}; };
static arc arcs_61_9[1] = { static arc arcs_61_9[1] = {
{150, 5}, {149, 5},
}; };
static arc arcs_61_10[1] = { static arc arcs_61_10[1] = {
{151, 5}, {150, 5},
}; };
static state states_61[11] = { static state states_61[11] = {
{7, arcs_61_0}, {7, arcs_61_0},
@ -1304,7 +1303,7 @@ static arc arcs_62_0[1] = {
{26, 1}, {26, 1},
}; };
static arc arcs_62_1[3] = { static arc arcs_62_1[3] = {
{155, 2}, {154, 2},
{27, 3}, {27, 3},
{0, 1}, {0, 1},
}; };
@ -1330,7 +1329,7 @@ static arc arcs_63_0[1] = {
{26, 1}, {26, 1},
}; };
static arc arcs_63_1[3] = { static arc arcs_63_1[3] = {
{156, 2}, {155, 2},
{27, 3}, {27, 3},
{0, 1}, {0, 1},
}; };
@ -1377,7 +1376,7 @@ static state states_64[5] = {
}; };
static arc arcs_65_0[3] = { static arc arcs_65_0[3] = {
{13, 1}, {13, 1},
{145, 2}, {144, 2},
{75, 3}, {75, 3},
}; };
static arc arcs_65_1[2] = { static arc arcs_65_1[2] = {
@ -1385,7 +1384,7 @@ static arc arcs_65_1[2] = {
{15, 5}, {15, 5},
}; };
static arc arcs_65_2[1] = { static arc arcs_65_2[1] = {
{157, 6}, {156, 6},
}; };
static arc arcs_65_3[1] = { static arc arcs_65_3[1] = {
{19, 5}, {19, 5},
@ -1397,7 +1396,7 @@ static arc arcs_65_5[1] = {
{0, 5}, {0, 5},
}; };
static arc arcs_65_6[1] = { static arc arcs_65_6[1] = {
{147, 5}, {146, 5},
}; };
static state states_65[7] = { static state states_65[7] = {
{3, arcs_65_0}, {3, arcs_65_0},
@ -1409,14 +1408,14 @@ static state states_65[7] = {
{1, arcs_65_6}, {1, arcs_65_6},
}; };
static arc arcs_66_0[1] = { static arc arcs_66_0[1] = {
{158, 1}, {157, 1},
}; };
static arc arcs_66_1[2] = { static arc arcs_66_1[2] = {
{27, 2}, {27, 2},
{0, 1}, {0, 1},
}; };
static arc arcs_66_2[2] = { static arc arcs_66_2[2] = {
{158, 1}, {157, 1},
{0, 2}, {0, 2},
}; };
static state states_66[3] = { static state states_66[3] = {
@ -1438,14 +1437,14 @@ static arc arcs_67_2[2] = {
}; };
static arc arcs_67_3[3] = { static arc arcs_67_3[3] = {
{26, 5}, {26, 5},
{159, 6}, {158, 6},
{0, 3}, {0, 3},
}; };
static arc arcs_67_4[1] = { static arc arcs_67_4[1] = {
{75, 6}, {75, 6},
}; };
static arc arcs_67_5[2] = { static arc arcs_67_5[2] = {
{159, 6}, {158, 6},
{0, 5}, {0, 5},
}; };
static arc arcs_67_6[1] = { static arc arcs_67_6[1] = {
@ -1532,7 +1531,7 @@ static state states_71[5] = {
{2, arcs_71_4}, {2, arcs_71_4},
}; };
static arc arcs_72_0[1] = { static arc arcs_72_0[1] = {
{160, 1}, {159, 1},
}; };
static arc arcs_72_1[1] = { static arc arcs_72_1[1] = {
{19, 2}, {19, 2},
@ -1568,7 +1567,7 @@ static state states_72[8] = {
{1, arcs_72_7}, {1, arcs_72_7},
}; };
static arc arcs_73_0[3] = { static arc arcs_73_0[3] = {
{161, 1}, {160, 1},
{28, 2}, {28, 2},
{29, 3}, {29, 3},
}; };
@ -1583,7 +1582,7 @@ static arc arcs_73_3[1] = {
{26, 6}, {26, 6},
}; };
static arc arcs_73_4[4] = { static arc arcs_73_4[4] = {
{161, 1}, {160, 1},
{28, 2}, {28, 2},
{29, 3}, {29, 3},
{0, 4}, {0, 4},
@ -1612,7 +1611,7 @@ static arc arcs_74_0[1] = {
{26, 1}, {26, 1},
}; };
static arc arcs_74_1[3] = { static arc arcs_74_1[3] = {
{156, 2}, {155, 2},
{25, 3}, {25, 3},
{0, 1}, {0, 1},
}; };
@ -1629,8 +1628,8 @@ static state states_74[4] = {
{1, arcs_74_3}, {1, arcs_74_3},
}; };
static arc arcs_75_0[2] = { static arc arcs_75_0[2] = {
{155, 1}, {154, 1},
{163, 1}, {162, 1},
}; };
static arc arcs_75_1[1] = { static arc arcs_75_1[1] = {
{0, 1}, {0, 1},
@ -1652,7 +1651,7 @@ static arc arcs_76_3[1] = {
{104, 4}, {104, 4},
}; };
static arc arcs_76_4[2] = { static arc arcs_76_4[2] = {
{162, 5}, {161, 5},
{0, 4}, {0, 4},
}; };
static arc arcs_76_5[1] = { static arc arcs_76_5[1] = {
@ -1673,7 +1672,7 @@ static arc arcs_77_1[1] = {
{105, 2}, {105, 2},
}; };
static arc arcs_77_2[2] = { static arc arcs_77_2[2] = {
{162, 3}, {161, 3},
{0, 2}, {0, 2},
}; };
static arc arcs_77_3[1] = { static arc arcs_77_3[1] = {
@ -1686,8 +1685,8 @@ static state states_77[4] = {
{1, arcs_77_3}, {1, arcs_77_3},
}; };
static arc arcs_78_0[2] = { static arc arcs_78_0[2] = {
{156, 1}, {155, 1},
{165, 1}, {164, 1},
}; };
static arc arcs_78_1[1] = { static arc arcs_78_1[1] = {
{0, 1}, {0, 1},
@ -1709,7 +1708,7 @@ static arc arcs_79_3[1] = {
{106, 4}, {106, 4},
}; };
static arc arcs_79_4[2] = { static arc arcs_79_4[2] = {
{164, 5}, {163, 5},
{0, 4}, {0, 4},
}; };
static arc arcs_79_5[1] = { static arc arcs_79_5[1] = {
@ -1730,7 +1729,7 @@ static arc arcs_80_1[1] = {
{105, 2}, {105, 2},
}; };
static arc arcs_80_2[2] = { static arc arcs_80_2[2] = {
{164, 3}, {163, 3},
{0, 2}, {0, 2},
}; };
static arc arcs_80_3[1] = { static arc arcs_80_3[1] = {
@ -1764,7 +1763,7 @@ static state states_82[2] = {
{1, arcs_82_1}, {1, arcs_82_1},
}; };
static arc arcs_83_0[1] = { static arc arcs_83_0[1] = {
{167, 1}, {166, 1},
}; };
static arc arcs_83_1[2] = { static arc arcs_83_1[2] = {
{9, 2}, {9, 2},
@ -1780,11 +1779,11 @@ static state states_83[3] = {
}; };
static dfa dfas[84] = { static dfa dfas[84] = {
{256, "single_input", 0, 3, states_0, {256, "single_input", 0, 3, states_0,
"\004\050\014\000\000\000\000\025\074\005\023\310\011\020\004\000\300\020\222\006\201"}, "\004\050\014\000\000\000\000\025\074\005\023\310\011\020\004\000\140\010\111\203\100"},
{257, "file_input", 0, 2, states_1, {257, "file_input", 0, 2, states_1,
"\204\050\014\000\000\000\000\025\074\005\023\310\011\020\004\000\300\020\222\006\201"}, "\204\050\014\000\000\000\000\025\074\005\023\310\011\020\004\000\140\010\111\203\100"},
{258, "eval_input", 0, 3, states_2, {258, "eval_input", 0, 3, states_2,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"},
{259, "decorator", 0, 7, states_3, {259, "decorator", 0, 7, states_3,
"\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, "\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{260, "decorators", 0, 2, states_4, {260, "decorators", 0, 2, states_4,
@ -1800,13 +1799,13 @@ static dfa dfas[84] = {
{265, "fplist", 0, 3, states_9, {265, "fplist", 0, 3, states_9,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{266, "stmt", 0, 2, states_10, {266, "stmt", 0, 2, states_10,
"\000\050\014\000\000\000\000\025\074\005\023\310\011\020\004\000\300\020\222\006\201"}, "\000\050\014\000\000\000\000\025\074\005\023\310\011\020\004\000\140\010\111\203\100"},
{267, "simple_stmt", 0, 4, states_11, {267, "simple_stmt", 0, 4, states_11,
"\000\040\010\000\000\000\000\025\074\005\023\000\000\020\004\000\300\020\222\006\200"}, "\000\040\010\000\000\000\000\025\074\005\023\000\000\020\004\000\140\010\111\003\100"},
{268, "small_stmt", 0, 2, states_12, {268, "small_stmt", 0, 2, states_12,
"\000\040\010\000\000\000\000\025\074\005\023\000\000\020\004\000\300\020\222\006\200"}, "\000\040\010\000\000\000\000\025\074\005\023\000\000\020\004\000\140\010\111\003\100"},
{269, "expr_stmt", 0, 6, states_13, {269, "expr_stmt", 0, 6, states_13,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"},
{270, "augassign", 0, 2, states_14, {270, "augassign", 0, 2, states_14,
"\000\000\000\000\000\360\377\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, "\000\000\000\000\000\360\377\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{271, "print_stmt", 0, 9, states_15, {271, "print_stmt", 0, 9, states_15,
@ -1816,7 +1815,7 @@ static dfa dfas[84] = {
{273, "pass_stmt", 0, 2, states_17, {273, "pass_stmt", 0, 2, states_17,
"\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000\000"}, "\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{274, "flow_stmt", 0, 2, states_18, {274, "flow_stmt", 0, 2, states_18,
"\000\000\000\000\000\000\000\000\074\000\000\000\000\000\000\000\000\000\000\000\200"}, "\000\000\000\000\000\000\000\000\074\000\000\000\000\000\000\000\000\000\000\000\100"},
{275, "break_stmt", 0, 2, states_19, {275, "break_stmt", 0, 2, states_19,
"\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000"}, "\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000"},
{276, "continue_stmt", 0, 2, states_20, {276, "continue_stmt", 0, 2, states_20,
@ -1824,7 +1823,7 @@ static dfa dfas[84] = {
{277, "return_stmt", 0, 3, states_21, {277, "return_stmt", 0, 3, states_21,
"\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000"}, "\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000"},
{278, "yield_stmt", 0, 2, states_22, {278, "yield_stmt", 0, 2, states_22,
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\200"}, "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\100"},
{279, "raise_stmt", 0, 7, states_23, {279, "raise_stmt", 0, 7, states_23,
"\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000"}, "\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000"},
{280, "import_stmt", 0, 2, states_24, {280, "import_stmt", 0, 2, states_24,
@ -1850,7 +1849,7 @@ static dfa dfas[84] = {
{290, "assert_stmt", 0, 5, states_34, {290, "assert_stmt", 0, 5, states_34,
"\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000"}, "\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000"},
{291, "compound_stmt", 0, 2, states_35, {291, "compound_stmt", 0, 2, states_35,
"\000\010\004\000\000\000\000\000\000\000\000\310\011\000\000\000\000\000\000\000\001"}, "\000\010\004\000\000\000\000\000\000\000\000\310\011\000\000\000\000\000\000\200\000"},
{292, "if_stmt", 0, 8, states_36, {292, "if_stmt", 0, 8, states_36,
"\000\000\000\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000"}, "\000\000\000\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000"},
{293, "while_stmt", 0, 8, states_37, {293, "while_stmt", 0, 8, states_37,
@ -1866,69 +1865,69 @@ static dfa dfas[84] = {
{298, "except_clause", 0, 5, states_42, {298, "except_clause", 0, 5, states_42,
"\000\000\000\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000"}, "\000\000\000\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000"},
{299, "suite", 0, 5, states_43, {299, "suite", 0, 5, states_43,
"\004\040\010\000\000\000\000\025\074\005\023\000\000\020\004\000\300\020\222\006\200"}, "\004\040\010\000\000\000\000\025\074\005\023\000\000\020\004\000\140\010\111\003\100"},
{300, "testlist_safe", 0, 5, states_44, {300, "testlist_safe", 0, 5, states_44,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"},
{301, "old_test", 0, 2, states_45, {301, "old_test", 0, 2, states_45,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"},
{302, "old_lambdef", 0, 5, states_46, {302, "old_lambdef", 0, 5, states_46,
"\000\000\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000"}, "\000\000\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000"},
{303, "test", 0, 6, states_47, {303, "test", 0, 6, states_47,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"},
{304, "or_test", 0, 2, states_48, {304, "or_test", 0, 2, states_48,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\000\004\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\004\000\140\010\111\003\000"},
{305, "and_test", 0, 2, states_49, {305, "and_test", 0, 2, states_49,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\000\004\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\004\000\140\010\111\003\000"},
{306, "not_test", 0, 3, states_50, {306, "not_test", 0, 3, states_50,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\000\004\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\004\000\140\010\111\003\000"},
{307, "comparison", 0, 2, states_51, {307, "comparison", 0, 2, states_51,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\140\010\111\003\000"},
{308, "comp_op", 0, 4, states_52, {308, "comp_op", 0, 4, states_52,
"\000\000\000\000\000\000\000\000\000\000\010\000\000\000\344\037\000\000\000\000\000"}, "\000\000\000\000\000\000\000\000\000\000\010\000\000\000\344\017\000\000\000\000\000"},
{309, "expr", 0, 2, states_53, {309, "expr", 0, 2, states_53,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\140\010\111\003\000"},
{310, "xor_expr", 0, 2, states_54, {310, "xor_expr", 0, 2, states_54,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\140\010\111\003\000"},
{311, "and_expr", 0, 2, states_55, {311, "and_expr", 0, 2, states_55,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\140\010\111\003\000"},
{312, "shift_expr", 0, 2, states_56, {312, "shift_expr", 0, 2, states_56,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\140\010\111\003\000"},
{313, "arith_expr", 0, 2, states_57, {313, "arith_expr", 0, 2, states_57,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\140\010\111\003\000"},
{314, "term", 0, 2, states_58, {314, "term", 0, 2, states_58,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\140\010\111\003\000"},
{315, "factor", 0, 3, states_59, {315, "factor", 0, 3, states_59,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\140\010\111\003\000"},
{316, "power", 0, 4, states_60, {316, "power", 0, 4, states_60,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\111\003\000"},
{317, "atom", 0, 11, states_61, {317, "atom", 0, 11, states_61,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\111\003\000"},
{318, "listmaker", 0, 5, states_62, {318, "listmaker", 0, 5, states_62,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"},
{319, "testlist_gexp", 0, 5, states_63, {319, "testlist_gexp", 0, 5, states_63,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"},
{320, "lambdef", 0, 5, states_64, {320, "lambdef", 0, 5, states_64,
"\000\000\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000"}, "\000\000\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000"},
{321, "trailer", 0, 7, states_65, {321, "trailer", 0, 7, states_65,
"\000\040\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\002\000\000"}, "\000\040\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\001\000\000"},
{322, "subscriptlist", 0, 3, states_66, {322, "subscriptlist", 0, 3, states_66,
"\000\040\050\000\000\000\000\000\000\010\000\000\000\020\004\000\300\020\222\006\000"}, "\000\040\050\000\000\000\000\000\000\010\000\000\000\020\004\000\140\010\111\003\000"},
{323, "subscript", 0, 7, states_67, {323, "subscript", 0, 7, states_67,
"\000\040\050\000\000\000\000\000\000\010\000\000\000\020\004\000\300\020\222\006\000"}, "\000\040\050\000\000\000\000\000\000\010\000\000\000\020\004\000\140\010\111\003\000"},
{324, "sliceop", 0, 3, states_68, {324, "sliceop", 0, 3, states_68,
"\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, "\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{325, "exprlist", 0, 3, states_69, {325, "exprlist", 0, 3, states_69,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\140\010\111\003\000"},
{326, "testlist", 0, 3, states_70, {326, "testlist", 0, 3, states_70,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"},
{327, "dictmaker", 0, 5, states_71, {327, "dictmaker", 0, 5, states_71,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"},
{328, "classdef", 0, 8, states_72, {328, "classdef", 0, 8, states_72,
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\001"}, "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\200\000"},
{329, "arglist", 0, 8, states_73, {329, "arglist", 0, 8, states_73,
"\000\040\010\060\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, "\000\040\010\060\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"},
{330, "argument", 0, 4, states_74, {330, "argument", 0, 4, states_74,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"},
{331, "list_iter", 0, 2, states_75, {331, "list_iter", 0, 2, states_75,
"\000\000\000\000\000\000\000\000\000\000\000\210\000\000\000\000\000\000\000\000\000"}, "\000\000\000\000\000\000\000\000\000\000\000\210\000\000\000\000\000\000\000\000\000"},
{332, "list_for", 0, 6, states_76, {332, "list_for", 0, 6, states_76,
@ -1942,13 +1941,13 @@ static dfa dfas[84] = {
{336, "gen_if", 0, 4, states_80, {336, "gen_if", 0, 4, states_80,
"\000\000\000\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000"}, "\000\000\000\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000"},
{337, "testlist1", 0, 2, states_81, {337, "testlist1", 0, 2, states_81,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"},
{338, "encoding_decl", 0, 2, states_82, {338, "encoding_decl", 0, 2, states_82,
"\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, "\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{339, "yield_expr", 0, 3, states_83, {339, "yield_expr", 0, 3, states_83,
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\200"}, "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\100"},
}; };
static label labels[168] = { static label labels[167] = {
{0, "EMPTY"}, {0, "EMPTY"},
{256, 0}, {256, 0},
{4, 0}, {4, 0},
@ -2072,7 +2071,6 @@ static label labels[168] = {
{31, 0}, {31, 0},
{30, 0}, {30, 0},
{29, 0}, {29, 0},
{29, 0},
{1, "is"}, {1, "is"},
{310, 0}, {310, 0},
{18, 0}, {18, 0},
@ -2121,6 +2119,6 @@ static label labels[168] = {
grammar _PyParser_Grammar = { grammar _PyParser_Grammar = {
84, 84,
dfas, dfas,
{168, labels}, {167, labels},
256 256
}; };