Mirror of https://github.com/python/cpython.git (synced 2025-07-07 19:35:27 +00:00)
gh-132661: Implement PEP 750 (#132662)
Co-authored-by: Lysandros Nikolaou <lisandrosnik@gmail.com>
Co-authored-by: Bénédikt Tran <10796600+picnixz@users.noreply.github.com>
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Co-authored-by: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>
Co-authored-by: Wingy <git@wingysam.xyz>
Co-authored-by: Koudai Aono <koxudaxi@gmail.com>
Co-authored-by: Dave Peck <davepeck@gmail.com>
Co-authored-by: Terry Jan Reedy <tjreedy@udel.edu>
Co-authored-by: Paul Everitt <pauleveritt@me.com>
Co-authored-by: sobolevn <mail@sobolevn.me>
parent 5ea9010e89
commit 60202609a2
81 changed files with 7716 additions and 3761 deletions
Lib/token.py (generated): 13 changed lines
@@ -66,12 +66,15 @@ SOFT_KEYWORD = 58
 FSTRING_START = 59
 FSTRING_MIDDLE = 60
 FSTRING_END = 61
-COMMENT = 62
-NL = 63
+TSTRING_START = 62
+TSTRING_MIDDLE = 63
+TSTRING_END = 64
+COMMENT = 65
+NL = 66
 # These aren't used by the C tokenizer but are needed for tokenize.py
-ERRORTOKEN = 64
-ENCODING = 65
-N_TOKENS = 66
+ERRORTOKEN = 67
+ENCODING = 68
+N_TOKENS = 69
 
 # Special definitions for cooperation with parser
 NT_OFFSET = 256
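As the hunk shows, the three new TSTRING_* constants slot in directly after the FSTRING_* trio, so COMMENT, NL, ERRORTOKEN, ENCODING and N_TOKENS each shift up by three. A minimal sketch of how the new constants surface through the token and tokenize modules (assuming a CPython build that includes this commit, so the t-string syntax and the TSTRING_* names are available; the sample source string is hypothetical):

import io
import token
import tokenize

# Tokenize a small program that uses a PEP 750 template-string literal.
source = 'name = "world"\ngreeting = t"Hello {name}!"\n'
tstring_tokens = {token.TSTRING_START, token.TSTRING_MIDDLE, token.TSTRING_END}

for tok in tokenize.generate_tokens(io.StringIO(source).readline):
    if tok.type in tstring_tokens:
        # Print each TSTRING_* token name alongside its source text.
        print(token.tok_name[tok.type], repr(tok.string))

Because the t-string tokens mirror the existing FSTRING_START/MIDDLE/END scheme, the expressions inside the braces are still emitted as ordinary NAME/OP tokens between the TSTRING_MIDDLE pieces.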