mirror of
https://github.com/python/cpython.git
synced 2025-07-24 11:44:31 +00:00

svn+ssh://pythondev@svn.python.org/python/trunk
................
r68288 | benjamin.peterson | 2009-01-03 18:39:07 -0600 (Sat, 03 Jan 2009) | 1 line
only check the actual compile() call for a SyntaxError
................
r68289 | georg.brandl | 2009-01-04 02:26:10 -0600 (Sun, 04 Jan 2009) | 2 lines
Test commit.
................
r68290 | georg.brandl | 2009-01-04 04:23:49 -0600 (Sun, 04 Jan 2009) | 4 lines
Add "suspicious" builder which finds leftover markup in the HTML files.
Patch by Gabriel Genellina.
................
r68291 | georg.brandl | 2009-01-04 04:24:09 -0600 (Sun, 04 Jan 2009) | 2 lines
Fix two issues found by the suspicious builder.
................
r68325 | benjamin.peterson | 2009-01-04 16:00:18 -0600 (Sun, 04 Jan 2009) | 1 line
use Jinja 2.1.1
................
r68326 | georg.brandl | 2009-01-04 16:03:10 -0600 (Sun, 04 Jan 2009) | 2 lines
Update make.bat.
................
r68338 | neal.norwitz | 2009-01-04 21:57:25 -0600 (Sun, 04 Jan 2009) | 1 line
Make sure to checkout any new packages
................
r68388 | benjamin.peterson | 2009-01-07 21:39:46 -0600 (Wed, 07 Jan 2009) | 1 line
string exceptions are gone
................
r68393 | benjamin.peterson | 2009-01-07 22:01:00 -0600 (Wed, 07 Jan 2009) | 1 line
use new sphinx modules
................
r68423 | benjamin.peterson | 2009-01-08 20:13:34 -0600 (Thu, 08 Jan 2009) | 29 lines
Merged revisions 68306-68308,68340,68368,68422 via svnmerge from
svn+ssh://pythondev@svn.python.org/sandbox/trunk/2to3/lib2to3
........
r68306 | benjamin.peterson | 2009-01-04 12:27:19 -0600 (Sun, 04 Jan 2009) | 1 line
fix_urllib: add mappings for the url parsing functions
........
r68307 | benjamin.peterson | 2009-01-04 12:30:01 -0600 (Sun, 04 Jan 2009) | 1 line
remove duplicated function
........
r68308 | benjamin.peterson | 2009-01-04 12:50:34 -0600 (Sun, 04 Jan 2009) | 1 line
turtle is no longer renamed
........
r68340 | georg.brandl | 2009-01-05 02:11:39 -0600 (Mon, 05 Jan 2009) | 2 lines
Fix undefined locals in parse_tokens().
........
r68368 | benjamin.peterson | 2009-01-06 17:56:10 -0600 (Tue, 06 Jan 2009) | 1 line
fix typo (thanks to Robert Lehmann)
........
r68422 | benjamin.peterson | 2009-01-08 20:01:03 -0600 (Thu, 08 Jan 2009) | 1 line
run the imports fixers after fix_import, so fix_import doesn't try to make
stdlib renames into relative imports #4876
........
................
146 lines
4.7 KiB
Python
# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

# Modifications:
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Parser driver.

This provides a high-level interface to parse a file into a syntax tree.

"""

__author__ = "Guido van Rossum <guido@python.org>"

__all__ = ["Driver", "load_grammar"]

# Python imports
import os
import logging
import sys

# Pgen imports
from . import grammar, parse, token, tokenize, pgen

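# Minimal usage sketch, assuming a Grammar.txt file is reachable from the
# working directory (callers usually pass an explicit grammar path):
#
#   g = load_grammar()                  # generate or reuse the pickled tables
#   drv = Driver(g)                     # convert and logger are optional
#   tree = drv.parse_string("x = 1\n")  # returns the root syntax tree node
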
class Driver(object):

    def __init__(self, grammar, convert=None, logger=None):
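        # grammar: a pgen2 Grammar instance, typically from load_grammar().
        # convert: optional callback handed to parse.Parser; it is used to
        #          turn raw parse results into the caller's tree objects.
        # logger:  destination for debug output; defaults to the root logger.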
        self.grammar = grammar
        if logger is None:
            logger = logging.getLogger()
        self.logger = logger
        self.convert = convert

    def parse_tokens(self, tokens, debug=False):
        """Parse a series of tokens and return the syntax tree."""
        # XXX Move the prefix computation into a wrapper around tokenize.
        p = parse.Parser(self.grammar, self.convert)
        p.setup()
        lineno = 1
        column = 0
        type = value = start = end = line_text = None
        prefix = ""
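        # tokens yields 5-tuples (type, value, start, end, line_text) in the
        # format produced by tokenize.generate_tokens().  Whitespace, comments
        # and NL tokens preceding a significant token are collected into
        # ``prefix``; (lineno, column) tracks the position reached so far.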
        for quintuple in tokens:
            type, value, start, end, line_text = quintuple
            if start != (lineno, column):
                assert (lineno, column) <= start, ((lineno, column), start)
                s_lineno, s_column = start
                if lineno < s_lineno:
                    prefix += "\n" * (s_lineno - lineno)
                    lineno = s_lineno
                    column = 0
                if column < s_column:
                    prefix += line_text[column:s_column]
                    column = s_column
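            # Comments and non-logical newlines never reach the parser; they
            # are folded into the prefix of the next significant token.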
            if type in (tokenize.COMMENT, tokenize.NL):
                prefix += value
                lineno, column = end
                if value.endswith("\n"):
                    lineno += 1
                    column = 0
                continue
            if type == token.OP:
                type = grammar.opmap[value]
            if debug:
                self.logger.debug("%s %r (prefix=%r)",
                                  token.tok_name[type], value, prefix)
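            # addtoken() returns True once the start symbol has been fully
            # reduced, i.e. the grammar has accepted the input.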
            if p.addtoken(type, value, (prefix, start)):
                if debug:
                    self.logger.debug("Stop.")
                break
            prefix = ""
            lineno, column = end
            if value.endswith("\n"):
                lineno += 1
                column = 0
        else:
            # We never broke out -- EOF is too soon (how can this happen???)
            raise parse.ParseError("incomplete input",
                                   type, value, (prefix, start))
        return p.rootnode

    def parse_stream_raw(self, stream, debug=False):
        """Parse a stream and return the syntax tree."""
        tokens = tokenize.generate_tokens(stream.readline)
        return self.parse_tokens(tokens, debug)

    def parse_stream(self, stream, debug=False):
        """Parse a stream and return the syntax tree."""
        return self.parse_stream_raw(stream, debug)

    def parse_file(self, filename, debug=False):
        """Parse a file and return the syntax tree."""
        stream = open(filename)
        try:
            return self.parse_stream(stream, debug)
        finally:
            stream.close()

    def parse_string(self, text, debug=False):
        """Parse a string and return the syntax tree."""
        tokens = tokenize.generate_tokens(generate_lines(text).__next__)
        return self.parse_tokens(tokens, debug)


def generate_lines(text):
    """Generator that behaves like readline without using StringIO."""
    for line in text.splitlines(True):
        yield line
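    # Once the text is exhausted keep returning "", just as file.readline()
    # does at EOF; tokenize uses that empty string as its end-of-input signal.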
    while True:
        yield ""


def load_grammar(gt="Grammar.txt", gp=None,
                 save=True, force=False, logger=None):
    """Load the grammar (maybe from a pickle)."""
    if logger is None:
        logger = logging.getLogger()
    if gp is None:
        head, tail = os.path.splitext(gt)
        if tail == ".txt":
            tail = ""
        gp = head + tail + ".".join(map(str, sys.version_info)) + ".pickle"
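    # Regenerate the tables from the grammar text unless a pickle that is at
    # least as new as the text is already on disk.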
    if force or not _newer(gp, gt):
        logger.info("Generating grammar tables from %s", gt)
        g = pgen.generate_grammar(gt)
        if save:
            logger.info("Writing grammar tables to %s", gp)
            try:
                g.dump(gp)
            except IOError as e:
                logger.info("Writing failed: %s", e)
    else:
        g = grammar.Grammar()
        g.load(gp)
    return g


def _newer(a, b):
    """Inquire whether file a was written since file b."""
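    # A missing a (the pickle) means "not newer"; a missing b (the grammar
    # text) means there is nothing to compare against, so a counts as newer.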
    if not os.path.exists(a):
        return False
    if not os.path.exists(b):
        return True
    return os.path.getmtime(a) >= os.path.getmtime(b)