lib2to3.pgen2.driver.load_grammar() now creates a stable cache file between runs given the same Grammar.txt input, regardless of the hash randomization setting.
Gregory P. Smith [Google Inc.] 2016-09-08 00:40:07 +00:00
parent d61910c598
commit dd1c638b92
6 changed files with 115 additions and 18 deletions
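
All four hunks below make the same kind of change: iterate over dicts in
sorted order before their contents are written into the pickled grammar
cache, so the output no longer depends on dict iteration order (which, for
str keys, varied with the hash randomization seed). A minimal sketch of the
principle; serialize_arcs and the sample labels are illustrative, not code
from this commit:

import pickle

def serialize_arcs(arcs):
    # Flatten an {label: next_state_index} mapping into a list of pairs
    # in a deterministic (sorted) order before pickling.
    return [(label, next) for label, next in sorted(arcs.items())]

a = {"NAME": 1, "(": 2, "[": 3}
b = {"[": 3, "(": 2, "NAME": 1}   # same mapping, built in a different order
blob_a = pickle.dumps(serialize_arcs(a), pickle.HIGHEST_PROTOCOL)
blob_b = pickle.dumps(serialize_arcs(b), pickle.HIGHEST_PROTOCOL)
assert blob_a == blob_b           # identical bytes regardless of dict order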

Lib/lib2to3/pgen2/pgen.py

@@ -39,7 +39,7 @@ class ParserGenerator(object):
             states = []
             for state in dfa:
                 arcs = []
-                for label, next in state.arcs.items():
+                for label, next in sorted(state.arcs.items()):
                     arcs.append((self.make_label(c, label), dfa.index(next)))
                 if state.isfinal:
                     arcs.append((0, dfa.index(state)))
@@ -52,7 +52,7 @@ class ParserGenerator(object):
     def make_first(self, c, name):
         rawfirst = self.first[name]
         first = {}
-        for label in rawfirst:
+        for label in sorted(rawfirst):
             ilabel = self.make_label(c, label)
             ##assert ilabel not in first # XXX failed on <> ... !=
             first[ilabel] = 1
@@ -192,7 +192,7 @@ class ParserGenerator(object):
                 for label, next in nfastate.arcs:
                     if label is not None:
                         addclosure(next, arcs.setdefault(label, {}))
-            for label, nfaset in arcs.items():
+            for label, nfaset in sorted(arcs.items()):
                 for st in states:
                     if st.nfaset == nfaset:
                         break
@@ -222,7 +222,7 @@ class ParserGenerator(object):
         print("Dump of DFA for", name)
         for i, state in enumerate(dfa):
             print("  State", i, state.isfinal and "(final)" or "")
-            for label, next in state.arcs.items():
+            for label, next in sorted(state.arcs.items()):
                 print("    %s -> %d" % (label, dfa.index(next)))

     def simplify_dfa(self, dfa):
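
One way to check the end-to-end effect of these sorted() calls is to
regenerate the grammar pickle in two subprocesses with different
PYTHONHASHSEED values and compare the resulting files byte for byte. The
sketch below is illustrative only, not the test added by this commit; the
use of lib2to3's bundled Grammar.txt and the temporary file names are
assumptions:

import os
import subprocess
import sys
import tempfile

import lib2to3

GRAMMAR_TXT = os.path.join(os.path.dirname(lib2to3.__file__), "Grammar.txt")

# Child command: force regeneration of the grammar pickle at the given path.
CHILD = ("import sys; from lib2to3.pgen2 import driver; "
         "driver.load_grammar(sys.argv[1], sys.argv[2], save=True, force=True)")

def generate(pickle_path, hashseed):
    # Run the child with an explicit hash seed so str hashing differs per run.
    env = dict(os.environ, PYTHONHASHSEED=str(hashseed))
    subprocess.check_call(
        [sys.executable, "-c", CHILD, GRAMMAR_TXT, pickle_path], env=env)
    with open(pickle_path, "rb") as f:
        return f.read()

with tempfile.TemporaryDirectory() as tmp:
    blob1 = generate(os.path.join(tmp, "g1.pickle"), hashseed=1)
    blob2 = generate(os.path.join(tmp, "g2.pickle"), hashseed=2)
assert blob1 == blob2, "grammar pickle should be byte-identical across runs"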