Fix a bug in PyUnicode_FromStringAndSize() with signed characters.

This commit is contained in:
Guido van Rossum 2007-07-19 18:21:28 +00:00
parent 814661e0d4
commit 00058aa28c

View file

@@ -438,13 +438,13 @@ PyObject *PyUnicode_FromStringAndSize(const char *u, Py_ssize_t size)
     /* Single characters are shared when using this constructor */
     if (size == 1) {
-        unicode = unicode_latin1[(int)*u];
+        unicode = unicode_latin1[Py_CHARMASK(*u)];
         if (!unicode) {
             unicode = _PyUnicode_New(1);
             if (!unicode)
                 return NULL;
-            unicode->str[0] = *u;
-            unicode_latin1[(int)*u] = unicode;
+            unicode->str[0] = Py_CHARMASK(*u);
+            unicode_latin1[Py_CHARMASK(*u)] = unicode;
         }
         Py_INCREF(unicode);
         return (PyObject *)unicode;
@@ -459,7 +459,7 @@ PyObject *PyUnicode_FromStringAndSize(const char *u, Py_ssize_t size)
     if (u != NULL) {
         Py_UNICODE *p = unicode->str;
         while (size--)
-            *p++ = *u++;
+            *p++ = Py_CHARMASK(*u++);
         /* Don't need to write trailing 0 because
            that's already done by _PyUnicode_New */
     }