Get rid of all #ifdef Py_USING_UNICODE (it is always present now).

(With the help of unifdef from freshmeat.)
Guido van Rossum 2007-05-03 17:49:24 +00:00
parent 938ef57e26
commit 8d30cc0144
36 changed files with 10 additions and 472 deletions
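
unifdef rewrites a C file as if the named preprocessor symbols were permanently defined (or undefined): conditionals whose outcome is now fixed are deleted, the surviving bodies are kept, and everything else passes through untouched. A minimal sketch of the kind of per-file invocation this commit describes (the exact flags and paths are an assumption, not recorded in the commit):

    # -D marks the symbol as always defined, so its #ifdef/#endif pairs drop out
    unifdef -DPy_USING_UNICODE Objects/longobject.c > longobject.c.new
    mv longobject.c.new Objects/longobject.c

The hunks below show exactly that effect: every removed line is a guard, and the code between the guards survives unchanged.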

@@ -1939,7 +1939,6 @@ digit beyond the first.
 	return NULL;
 }
 
-#ifdef Py_USING_UNICODE
 PyObject *
 PyLong_FromUnicode(Py_UNICODE *u, Py_ssize_t length, int base)
 {
@@ -1957,7 +1956,6 @@ PyLong_FromUnicode(Py_UNICODE *u, Py_ssize_t length, int base)
 	PyMem_FREE(buffer);
 	return result;
 }
-#endif
 
 /* forward */
 static PyLongObject *x_divrem
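
The hunk above shows only the tail of PyLong_FromUnicode. For orientation, a sketch of the whole function as it stood at the time, reconstructed around the visible context lines (the buffer handling and the PyUnicode_EncodeDecimal call are recalled from the surrounding source, not part of this diff):

PyObject *
PyLong_FromUnicode(Py_UNICODE *u, Py_ssize_t length, int base)
{
	PyObject *result;
	/* Scratch byte buffer for the ASCII form of the digits. */
	char *buffer = (char *)PyMem_MALLOC(length + 1);

	if (buffer == NULL)
		return NULL;
	/* Map Unicode decimal digits (including non-Latin ones) to ASCII
	   digits; errors out on characters it cannot convert. */
	if (PyUnicode_EncodeDecimal(u, length, buffer, NULL)) {
		PyMem_FREE(buffer);
		return NULL;
	}
	/* Delegate to the byte-string parser; this free/return pair is
	   what the hunk above shows. */
	result = PyLong_FromString(buffer, NULL, base);
	PyMem_FREE(buffer);
	return result;
}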
@@ -3538,12 +3536,10 @@ long_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
 		}
 		return PyLong_FromString(PyString_AS_STRING(x), NULL, base);
 	}
-#ifdef Py_USING_UNICODE
 	else if (PyUnicode_Check(x))
 		return PyLong_FromUnicode(PyUnicode_AS_UNICODE(x),
 					  PyUnicode_GET_SIZE(x),
 					  base);
-#endif
 	else {
 		PyErr_SetString(PyExc_TypeError,
 			"int() can't convert non-string with explicit base");