mirror of
https://github.com/python/cpython.git
synced 2025-07-07 19:35:27 +00:00
gh-131421: fix ASDL grammar for Dict
to have an expr?*
keys field (#131419)
In the `ast` documentation for Python: * https://docs.python.org/3/library/ast.html#ast.Dict it is made clear that: > When doing dictionary unpacking using dictionary literals the expression to be expanded goes in the values list, with a `None` at the corresponding position in `keys`. Hence, `keys` is really an `expr?*` and *not* an `expr*`. Co-authored-by: Bénédikt Tran <10796600+picnixz@users.noreply.github.com> Co-authored-by: Jelle Zijlstra <jelle.zijlstra@gmail.com>
This commit is contained in:
parent
af5799f305
commit
30840706b0
3 changed files with 40 additions and 30 deletions
|
@ -63,7 +63,7 @@ module Python
|
||||||
| UnaryOp(unaryop op, expr operand)
|
| UnaryOp(unaryop op, expr operand)
|
||||||
| Lambda(arguments args, expr body)
|
| Lambda(arguments args, expr body)
|
||||||
| IfExp(expr test, expr body, expr orelse)
|
| IfExp(expr test, expr body, expr orelse)
|
||||||
| Dict(expr* keys, expr* values)
|
| Dict(expr?* keys, expr* values)
|
||||||
| Set(expr* elts)
|
| Set(expr* elts)
|
||||||
| ListComp(expr elt, comprehension* generators)
|
| ListComp(expr elt, comprehension* generators)
|
||||||
| SetComp(expr elt, comprehension* generators)
|
| SetComp(expr elt, comprehension* generators)
|
||||||
|
|
|
@ -12,7 +12,7 @@
|
||||||
# type ::= product | sum
|
# type ::= product | sum
|
||||||
# product ::= fields ["attributes" fields]
|
# product ::= fields ["attributes" fields]
|
||||||
# fields ::= "(" { field, "," } field ")"
|
# fields ::= "(" { field, "," } field ")"
|
||||||
# field ::= TypeId ["?" | "*"] [Id]
|
# field ::= TypeId { "?" | "*" } [Id]
|
||||||
# sum ::= constructor { "|" constructor } ["attributes" fields]
|
# sum ::= constructor { "|" constructor } ["attributes" fields]
|
||||||
# constructor ::= ConstructorId [fields]
|
# constructor ::= ConstructorId [fields]
|
||||||
#
|
#
|
||||||
|
@ -20,6 +20,7 @@
|
||||||
# http://asdl.sourceforge.net/
|
# http://asdl.sourceforge.net/
|
||||||
#-------------------------------------------------------------------------------
|
#-------------------------------------------------------------------------------
|
||||||
from collections import namedtuple
|
from collections import namedtuple
|
||||||
|
import enum
|
||||||
import re
|
import re
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
|
@ -64,34 +65,43 @@ class Constructor(AST):
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return 'Constructor({0.name}, {0.fields})'.format(self)
|
return 'Constructor({0.name}, {0.fields})'.format(self)
|
||||||
|
|
||||||
|
class Quantifier(enum.Enum):
|
||||||
|
OPTIONAL = enum.auto()
|
||||||
|
SEQUENCE = enum.auto()
|
||||||
|
|
||||||
class Field(AST):
|
class Field(AST):
|
||||||
def __init__(self, type, name=None, seq=False, opt=False):
|
def __init__(self, type, name=None, quantifiers=None):
|
||||||
self.type = type
|
self.type = type
|
||||||
self.name = name
|
self.name = name
|
||||||
self.seq = seq
|
self.seq = False
|
||||||
self.opt = opt
|
self.opt = False
|
||||||
|
self.quantifiers = quantifiers or []
|
||||||
|
if len(self.quantifiers) > 0:
|
||||||
|
self.seq = self.quantifiers[-1] is Quantifier.SEQUENCE
|
||||||
|
self.opt = self.quantifiers[-1] is Quantifier.OPTIONAL
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
if self.seq:
|
extra = ""
|
||||||
extra = "*"
|
for mod in self.quantifiers:
|
||||||
elif self.opt:
|
if mod is Quantifier.SEQUENCE:
|
||||||
extra = "?"
|
extra += "*"
|
||||||
else:
|
elif mod is Quantifier.OPTIONAL:
|
||||||
extra = ""
|
extra += "?"
|
||||||
|
|
||||||
return "{}{} {}".format(self.type, extra, self.name)
|
return "{}{} {}".format(self.type, extra, self.name)
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
if self.seq:
|
extra = ""
|
||||||
extra = ", seq=True"
|
for mod in self.quantifiers:
|
||||||
elif self.opt:
|
if mod is Quantifier.SEQUENCE:
|
||||||
extra = ", opt=True"
|
extra += ", SEQUENCE"
|
||||||
else:
|
elif mod is Quantifier.OPTIONAL:
|
||||||
extra = ""
|
extra += ", OPTIONAL"
|
||||||
|
|
||||||
if self.name is None:
|
if self.name is None:
|
||||||
return 'Field({0.type}{1})'.format(self, extra)
|
return 'Field({0.type}, quantifiers=[{1}])'.format(self, extra)
|
||||||
else:
|
else:
|
||||||
return 'Field({0.type}, {0.name}{1})'.format(self, extra)
|
return 'Field({0.type}, {0.name}, quantifiers=[{1}])'.format(self, extra)
|
||||||
|
|
||||||
class Sum(AST):
|
class Sum(AST):
|
||||||
def __init__(self, types, attributes=None):
|
def __init__(self, types, attributes=None):
|
||||||
|
@ -314,10 +324,10 @@ class ASDLParser:
|
||||||
self._match(TokenKind.LParen)
|
self._match(TokenKind.LParen)
|
||||||
while self.cur_token.kind == TokenKind.TypeId:
|
while self.cur_token.kind == TokenKind.TypeId:
|
||||||
typename = self._advance()
|
typename = self._advance()
|
||||||
is_seq, is_opt = self._parse_optional_field_quantifier()
|
quantifiers = self._parse_optional_field_quantifier()
|
||||||
id = (self._advance() if self.cur_token.kind in self._id_kinds
|
id = (self._advance() if self.cur_token.kind in self._id_kinds
|
||||||
else None)
|
else None)
|
||||||
fields.append(Field(typename, id, seq=is_seq, opt=is_opt))
|
fields.append(Field(typename, id, quantifiers=quantifiers))
|
||||||
if self.cur_token.kind == TokenKind.RParen:
|
if self.cur_token.kind == TokenKind.RParen:
|
||||||
break
|
break
|
||||||
elif self.cur_token.kind == TokenKind.Comma:
|
elif self.cur_token.kind == TokenKind.Comma:
|
||||||
|
@ -339,14 +349,14 @@ class ASDLParser:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def _parse_optional_field_quantifier(self):
|
def _parse_optional_field_quantifier(self):
|
||||||
is_seq, is_opt = False, False
|
quantifiers = []
|
||||||
if self.cur_token.kind == TokenKind.Asterisk:
|
while self.cur_token.kind in (TokenKind.Asterisk, TokenKind.Question):
|
||||||
is_seq = True
|
if self.cur_token.kind == TokenKind.Asterisk:
|
||||||
|
quantifiers.append(Quantifier.SEQUENCE)
|
||||||
|
elif self.cur_token.kind == TokenKind.Question:
|
||||||
|
quantifiers.append(Quantifier.OPTIONAL)
|
||||||
self._advance()
|
self._advance()
|
||||||
elif self.cur_token.kind == TokenKind.Question:
|
return quantifiers
|
||||||
is_opt = True
|
|
||||||
self._advance()
|
|
||||||
return is_seq, is_opt
|
|
||||||
|
|
||||||
def _advance(self):
|
def _advance(self):
|
||||||
""" Return the value of the current token and read the next one into
|
""" Return the value of the current token and read the next one into
|
||||||
|
|
4
Python/Python-ast.c
generated
4
Python/Python-ast.c
generated
|
@ -6362,7 +6362,7 @@ init_types(void *arg)
|
||||||
" | UnaryOp(unaryop op, expr operand)\n"
|
" | UnaryOp(unaryop op, expr operand)\n"
|
||||||
" | Lambda(arguments args, expr body)\n"
|
" | Lambda(arguments args, expr body)\n"
|
||||||
" | IfExp(expr test, expr body, expr orelse)\n"
|
" | IfExp(expr test, expr body, expr orelse)\n"
|
||||||
" | Dict(expr* keys, expr* values)\n"
|
" | Dict(expr?* keys, expr* values)\n"
|
||||||
" | Set(expr* elts)\n"
|
" | Set(expr* elts)\n"
|
||||||
" | ListComp(expr elt, comprehension* generators)\n"
|
" | ListComp(expr elt, comprehension* generators)\n"
|
||||||
" | SetComp(expr elt, comprehension* generators)\n"
|
" | SetComp(expr elt, comprehension* generators)\n"
|
||||||
|
@ -6419,7 +6419,7 @@ init_types(void *arg)
|
||||||
if (!state->IfExp_type) return -1;
|
if (!state->IfExp_type) return -1;
|
||||||
state->Dict_type = make_type(state, "Dict", state->expr_type, Dict_fields,
|
state->Dict_type = make_type(state, "Dict", state->expr_type, Dict_fields,
|
||||||
2,
|
2,
|
||||||
"Dict(expr* keys, expr* values)");
|
"Dict(expr?* keys, expr* values)");
|
||||||
if (!state->Dict_type) return -1;
|
if (!state->Dict_type) return -1;
|
||||||
state->Set_type = make_type(state, "Set", state->expr_type, Set_fields, 1,
|
state->Set_type = make_type(state, "Set", state->expr_type, Set_fields, 1,
|
||||||
"Set(expr* elts)");
|
"Set(expr* elts)");
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue