[3.12] gh-121130: Fix f-string format specifiers with debug expressions (GH-121150) (#122063)

Pablo Galindo Salgado, 2024-07-20 19:05:01 +02:00, committed by GitHub
parent ca531e4326
commit a9daa4fd04
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
7 changed files with 8569 additions and 5795 deletions

Doc/library/ast.rst

@@ -302,9 +302,7 @@ Literals
                     Name(id='a', ctx=Load())],
                 keywords=[]),
             conversion=-1,
-            format_spec=JoinedStr(
-                values=[
-                    Constant(value='.3')]))]))
+            format_spec=Constant(value='.3'))]))

 .. class:: List(elts, ctx)
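
For context (not part of the diff): the node shape documented above can be checked with ast.dump. A minimal sketch, assuming a 3.12 interpreter that includes this change; on other versions the format_spec may still appear wrapped in a JoinedStr:

    import ast

    # Parse an f-string whose format spec is a plain literal and dump the
    # FormattedValue's format_spec node, mirroring the doc example above.
    node = ast.parse("f'{value:.3}'", mode="eval").body   # JoinedStr
    formatted = node.values[0]                            # FormattedValue
    print(ast.dump(formatted.format_spec, indent=2))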

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -0,0 +1,2 @@
+Fix f-strings with debug expressions in format specifiers. Patch by Pablo
+Galindo
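
To illustrate what the NEWS entry refers to, here is a hedged sketch (not taken from the patch's test suite) of a debug expression nested inside a format specifier. EchoSpec is a made-up helper whose __format__ simply returns the spec it receives, so the example runs cleanly on a build that includes this fix:

    class EchoSpec:
        """Illustrative helper: __format__ returns the spec it was given."""
        def __format__(self, spec):
            return spec

    width = 42
    # The format specifier below is the nested debug expression "width=",
    # which expands to the text "width=42" before being passed to __format__.
    print(f"{EchoSpec():{width=}}")   # prints: width=42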

File diff suppressed because it is too large

File diff suppressed because it is too large

Parser/tokenizer.h

@@ -14,17 +14,15 @@ extern "C" {
 #define MAXLEVEL 200 /* Max parentheses level */
 #define MAXFSTRINGLEVEL 150 /* Max f-string nesting level */
-enum decoding_state {
-    STATE_INIT,
-    STATE_SEEK_CODING,
-    STATE_NORMAL
-};
+enum decoding_state { STATE_INIT, STATE_SEEK_CODING, STATE_NORMAL };
 enum interactive_underflow_t {
-    /* Normal mode of operation: return a new token when asked in interactive mode */
+    /* Normal mode of operation: return a new token when asked in interactive mode
+     */
     IUNDERFLOW_NORMAL,
-    /* Forcefully return ENDMARKER when asked for a new token in interactive mode. This
-     * can be used to prevent the tokenizer to prompt the user for new tokens */
+    /* Forcefully return ENDMARKER when asked for a new token in interactive mode.
+     * This can be used to prevent the tokenizer to prompt the user for new tokens
+     */
     IUNDERFLOW_STOP,
 };
@@ -62,18 +60,22 @@ typedef struct _tokenizer_mode {
     Py_ssize_t last_expr_end;
     char *last_expr_buffer;
     int f_string_debug;
+    int in_format_spec;
 } tokenizer_mode;

 /* Tokenizer state */
 struct tok_state {
     /* Input state; buf <= cur <= inp <= end */
     /* NB an entire line is held in the buffer */
-    char *buf; /* Input buffer, or NULL; malloc'ed if fp != NULL or readline != NULL */
+    char *buf; /* Input buffer, or NULL; malloc'ed if fp != NULL or readline !=
+                  NULL */
     char *cur; /* Next character in buffer */
     char *inp; /* End of data in buffer */
     int fp_interactive; /* If the file descriptor is interactive */
-    char *interactive_src_start; /* The start of the source parsed so far in interactive mode */
-    char *interactive_src_end; /* The end of the source parsed so far in interactive mode */
+    char *interactive_src_start; /* The start of the source parsed so far in
+                                    interactive mode */
+    char *interactive_src_end; /* The end of the source parsed so far in
+                                    interactive mode */
     const char *end; /* End of input buffer if buf != NULL */
     const char *start; /* Start of current token if not NULL */
     int done; /* E_OK normally, E_EOF at EOF, otherwise error code */
@@ -138,7 +140,8 @@ struct tok_state {
 extern struct tok_state *_PyTokenizer_FromString(const char *, int, int);
 extern struct tok_state *_PyTokenizer_FromUTF8(const char *, int, int);
-extern struct tok_state *_PyTokenizer_FromReadline(PyObject*, const char*, int, int);
+extern struct tok_state *_PyTokenizer_FromReadline(PyObject *, const char *,
+                                                   int, int);
 extern struct tok_state *_PyTokenizer_FromFile(FILE *, const char *,
                                                const char *, const char *);
 extern void _PyTokenizer_Free(struct tok_state *);
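
The in_format_spec field added above is internal to the C tokenizer, but its effect is observable from Python through the tokenize module. A rough sketch (illustrative, not part of the commit) that prints the token stream of an f-string whose format spec contains a nested replacement field; on 3.12+ the literal parts of the spec arrive as FSTRING_MIDDLE tokens:

    import io
    import tokenize

    # Tokenize an f-string with a nested replacement field in its format spec.
    src = "f'{value:{width}.3f}'\n"
    for tok in tokenize.generate_tokens(io.StringIO(src).readline):
        print(tokenize.tok_name[tok.type], repr(tok.string))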