gh-105069: Add a readline-like callable to the tokenizer to consume input iteratively (#105070)

Author:  Pablo Galindo Salgado (committed by GitHub)
Date:    2023-05-30 22:43:34 +01:00
Parent:  2ea34cfb3a
Commit:  9216e69a87
7 changed files with 276 additions and 98 deletions
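For orientation, the "readline-like callable" in the title is the same protocol the pure-Python tokenize module has always consumed: a zero-argument callable that returns the next line of source on each call and an empty string (or empty bytes) at end of input. This commit teaches the C tokenizer to pull input through such a callable instead of requiring the whole source up front. The sketch below uses only the documented tokenize API, not the private plumbing changed in this diff:

    import io
    import tokenize

    source = "x = 1\nprint(x)\n"

    # A readline-style callable: each call returns the next line of source,
    # and an empty string once the input is exhausted.
    readline = io.StringIO(source).readline

    # generate_tokens() pulls lines lazily through the callable, which is the
    # iterative-consumption pattern this commit wires into the C tokenizer.
    for tok in tokenize.generate_tokens(readline):
        print(tokenize.tok_name[tok.type], repr(tok.string))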


@@ -37,15 +37,17 @@ typedef struct
 @classmethod
 _tokenizer.tokenizeriter.__new__ as tokenizeriter_new

-    source: str
+    readline: object
+    /
     *
     extra_tokens: bool
+    encoding: str(c_default="NULL") = 'utf-8'
 [clinic start generated code]*/

 static PyObject *
-tokenizeriter_new_impl(PyTypeObject *type, const char *source,
-                       int extra_tokens)
-/*[clinic end generated code: output=f6f9d8b4beec8106 input=90dc5b6a5df180c2]*/
+tokenizeriter_new_impl(PyTypeObject *type, PyObject *readline,
+                       int extra_tokens, const char *encoding)
+/*[clinic end generated code: output=7501a1211683ce16 input=f7dddf8a613ae8bd]*/
 {
     tokenizeriterobject *self = (tokenizeriterobject *)type->tp_alloc(type, 0);
     if (self == NULL) {
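In Argument Clinic terms, this hunk replaces the `source: str` parameter with a `readline` object that is positional-only (the `/` marker) and adds a keyword-only `encoding` parameter (after the `*` marker) defaulting to 'utf-8', alongside the existing keyword-only `extra_tokens`. A pure-Python stand-in for the generated calling convention, purely illustrative (the function itself is hypothetical; the parameter names come from the diff):

    # Mirrors the signature the clinic block above generates for __new__.
    def tokenizeriter_new(readline, /, *, extra_tokens, encoding='utf-8'):
        # readline: zero-argument callable returning one line per call
        # extra_tokens: required keyword-only flag
        # encoding: keyword-only, defaults to 'utf-8'
        ...

    # Valid: readline passed positionally, the rest by keyword.
    tokenizeriter_new(lambda: "", extra_tokens=False)

    # TypeError: readline cannot be passed by keyword, and extra_tokens
    # cannot be passed positionally.
    # tokenizeriter_new(readline=lambda: "", extra_tokens=False)
    # tokenizeriter_new(lambda: "", False)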
@@ -55,7 +57,7 @@ tokenizeriter_new_impl(PyTypeObject *type, const char *source,
     if (filename == NULL) {
         return NULL;
     }
-    self->tok = _PyTokenizer_FromUTF8(source, 1, 1);
+    self->tok = _PyTokenizer_FromReadline(readline, encoding, 1, 1);
     if (self->tok == NULL) {
         Py_DECREF(filename);
         return NULL;
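This second hunk swaps the whole-buffer entry point (`_PyTokenizer_FromUTF8`) for `_PyTokenizer_FromReadline`, which pulls lines through the callable and decodes them with the supplied encoding. At the Python level, the analogous bytes-plus-encoding flow is the documented tokenize.tokenize() entry point; the sketch below is illustrative and does not touch the private constructor:

    import io
    import tokenize

    data = b"# -*- coding: utf-8 -*-\nname = 'caf\xc3\xa9'\n"

    # A bytes readline callable, e.g. open(path, "rb").readline in real code.
    readline = io.BytesIO(data).readline

    # tokenize() first detects the encoding from the stream, then keeps calling
    # readline() and decodes each line with that encoding while tokenizing.
    for tok in tokenize.tokenize(readline):
        print(tokenize.tok_name[tok.type], repr(tok.string))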