Mirror of https://github.com/python/cpython.git, synced 2025-07-07 19:35:27 +00:00
gh-134675: Add t-string prefixes to the tokenize module and the lexical analysis doc, and add a test to make sure we catch this error in the future. (#134734)
* Add t-string prefixes to _all_string_prefixes, and add a test to make sure we catch this error in the future.
* Update lexical analysis docs for t-string prefixes.
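The point of the regression test is that tokenize's notion of valid string prefixes must not fall behind the compiler's. The actual test added by this commit is not shown in the hunk below; the following is only a minimal sketch of that idea (the helper name and the brute-force approach are illustrative, not the upstream test):

    import itertools
    import tokenize

    def compiler_accepted_prefixes(max_len=2):
        # Brute-force every short letter combination and keep the ones the
        # compiler accepts as a prefix of an empty string literal. Valid
        # prefixes are at most two characters long ('br', 'Rb', 'tr', ...).
        letters = "bfrtuBFRTU"
        accepted = set()
        for length in range(1, max_len + 1):
            for combo in itertools.product(letters, repeat=length):
                prefix = "".join(combo)
                try:
                    compile(prefix + '""', "<prefix-check>", "eval")
                except SyntaxError:
                    continue
                accepted.add(prefix)
        return accepted

    # _all_string_prefixes() is private to the tokenize module; before this
    # fix it was missing 't', 'tr', 'rt' and their case variants on an
    # interpreter that already compiles t-strings.
    missing = compiler_accepted_prefixes() - tokenize._all_string_prefixes()
    assert not missing, f"tokenize is missing prefixes: {sorted(missing)}"

On an interpreter without t-string support, the t-prefixed literals simply fail to compile, so a check along these lines degrades gracefully rather than failing.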
parent c60f39ada6
commit 08c78e02fa
3 changed files with 59 additions and 2 deletions
@@ -86,7 +86,7 @@ def _all_string_prefixes():
     # The valid string prefixes. Only contain the lower case versions,
     # and don't contain any permutations (include 'fr', but not
     # 'rf'). The various permutations will be generated.
-    _valid_string_prefixes = ['b', 'r', 'u', 'f', 'br', 'fr']
+    _valid_string_prefixes = ['b', 'r', 'u', 'f', 't', 'br', 'fr', 'tr']
     # if we add binary f-strings, add: ['fb', 'fbr']
     result = {''}
     for prefix in _valid_string_prefixes:
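The hunk stops at the loop header, but the surrounding comments describe the scheme: only lowercase, unpermuted base prefixes are listed, and the loop derives every other ordering and capitalization. A small self-contained sketch of that expansion (illustrative, not the exact body of _all_string_prefixes):

    import itertools

    def expand_prefixes(valid_prefixes=('b', 'r', 'u', 'f', 't', 'br', 'fr', 'tr')):
        result = {''}  # the empty prefix (a plain string literal) is always valid
        for prefix in valid_prefixes:
            # every ordering of the characters ('tr' also yields 'rt') ...
            for perm in itertools.permutations(prefix):
                # ... in every mix of upper and lower case ('tR', 'Rt', ...)
                for cased in itertools.product(*[(c, c.upper()) for c in perm]):
                    result.add(''.join(cased))
        return result

    print(sorted(p for p in expand_prefixes() if 't' in p.lower()))
    # e.g. ['RT', 'Rt', 'T', 'TR', 'Tr', 'rT', 'rt', 't', 'tR', 'tr']

Listing only 'tr' and letting the loop derive 'rt', 'TR', 'Rt', and so on keeps the hard-coded list short, which is also why a forgotten base prefix such as 't' affects every case and order variant at once.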