[3.10] bpo-46240: Correct the error for unclosed parentheses when the tokenizer is not finished (GH-30378). (GH-30819)
(cherry picked from commit 70f415fb8b)
Co-authored-by: Pablo Galindo Salgado <Pablogsal@gmail.com>
This commit is contained in:
parent f66ef3eab6
commit 633db1c4eb
4 changed files with 9 additions and 2 deletions
@@ -227,7 +227,7 @@ class ExceptionTests(unittest.TestCase):
         check('x = "a', 1, 5)
         check('lambda x: x = 2', 1, 1)
         check('f{a + b + c}', 1, 2)
-        check('[file for str(file) in []\n])', 1, 11)
+        check('[file for str(file) in []\n]', 1, 11)
         check('a = « hello » « world »', 1, 5)
         check('[\nfile\nfor str(file)\nin\n[]\n]', 3, 5)
         check('[file for\n str(file) in []]', 2, 2)
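The check() helper itself is not part of this hunk; presumably it compiles the snippet and asserts the line and column at which the resulting SyntaxError is reported, along these lines (a minimal sketch, body and signature assumed):

    # Hypothetical sketch of the check() helper referenced above; it is not
    # shown in this diff, so the exact body is assumed.
    def check(src, lineno, offset):
        with self.assertRaises(SyntaxError) as cm:
            compile(src, '<fragment>', 'exec')
        self.assertEqual(cm.exception.lineno, lineno)
        self.assertEqual(cm.exception.offset, offset)

The only change in this hunk is dropping the stray trailing ')' from one test snippet, presumably so the expected error location still matches under the corrected error reporting.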
@@ -1513,6 +1513,9 @@ def func2():
         for paren in "([{":
             self._check_error(paren + "1 + 2", f"\\{paren}' was never closed")
 
+        for paren in "([{":
+            self._check_error(f"a = {paren} 1, 2, 3\nb=3", f"\\{paren}' was never closed")
+
         for paren in ")]}":
             self._check_error(paren + "1 + 2", f"unmatched '\\{paren}'")
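The new loop covers the case this commit fixes: the unclosed bracket is followed by more source, so the tokenizer has not finished when the error is raised. A quick sketch of the behaviour the new test locks in (the file name and print call are illustrative, not part of the test):

    # Expected to report that the opening bracket was never closed, e.g.
    # "'(' was never closed", instead of a generic syntax error.
    try:
        compile("a = ( 1, 2, 3\nb=3", "<test>", "exec")
    except SyntaxError as exc:
        print(exc.msg)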
@@ -0,0 +1,3 @@
+Correct the error message for unclosed parentheses when the tokenizer
+doesn't reach the end of the source when the error is reported. Patch by
+Pablo Galindo
@@ -1342,7 +1342,8 @@ _PyPegen_run_parser(Parser *p)
     if (PyErr_Occurred()) {
         // Prioritize tokenizer errors to custom syntax errors raised
         // on the second phase only if the errors come from the parser.
-        if (p->tok->done == E_DONE && PyErr_ExceptionMatches(PyExc_SyntaxError)) {
+        int is_tok_ok = (p->tok->done == E_DONE || p->tok->done == E_OK);
+        if (is_tok_ok && PyErr_ExceptionMatches(PyExc_SyntaxError)) {
             _PyPegen_check_tokenizer_errors(p);
         }
         return NULL;
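The parser change makes the tokenizer-error pass run not only when the tokenizer has consumed all input (E_DONE) but also when it is still in the ordinary E_OK state, which is where it sits when an unclosed bracket is followed by further source. A small Python-level sketch of the two situations, reusing the inputs from the tests above (the loop and output comments are illustrative):

    # Both snippets should now report "'(' was never closed"; the second one,
    # where the tokenizer has not reached the end of the source, is the case
    # this commit corrects.
    for src in ("(1 + 2",               # tokenizer reaches end of input
                "a = ( 1, 2, 3\nb=3"):  # more source after the error
        try:
            compile(src, "<demo>", "exec")
        except SyntaxError as exc:
            print(exc.msg)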