Will backport to 2.4.
### error handlers
-strict_errors = lookup_error("strict")
-ignore_errors = lookup_error("ignore")
-replace_errors = lookup_error("replace")
-xmlcharrefreplace_errors = lookup_error("xmlcharrefreplace")
-backslashreplace_errors = lookup_error("backslashreplace")
+try:
+ strict_errors = lookup_error("strict")
+ ignore_errors = lookup_error("ignore")
+ replace_errors = lookup_error("replace")
+ xmlcharrefreplace_errors = lookup_error("xmlcharrefreplace")
+ backslashreplace_errors = lookup_error("backslashreplace")
+except LookupError:
+ # In --disable-unicode builds, these error handlers are missing
+ strict_errors = None
+ ignore_errors = None
+ replace_errors = None
+ xmlcharrefreplace_errors = None
+ backslashreplace_errors = None
# Tell modulefinder that using codecs probably needs the encodings
# package
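
The codecs.py hunk above lets the module import cleanly on interpreters configured with --disable-unicode: the built-in error handlers are not registered there, lookup_error() raises LookupError, and the module-level names fall back to None. A minimal sketch (not part of the patch) of how calling code can tolerate that fallback:

    import codecs

    def error_handler_or_none(name):
        # Return the registered handler, or None on interpreters where the
        # handler was never registered (e.g. --disable-unicode builds).
        try:
            return codecs.lookup_error(name)
        except LookupError:
            return None

    for name in ("strict", "ignore", "replace",
                 "xmlcharrefreplace", "backslashreplace"):
        print name, "->", error_handler_or_none(name)
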
d[bool] = _deepcopy_atomic
try:
d[complex] = _deepcopy_atomic
-except AttributeError:
+except NameError:
pass
d[str] = _deepcopy_atomic
try:
d[unicode] = _deepcopy_atomic
-except AttributeError:
+except NameError:
pass
try:
d[types.CodeType] = _deepcopy_atomic
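
The copy.py hunk switches the except clauses because `complex` and `unicode` are looked up as bare builtin names, not as attributes of the types module; when such a builtin is compiled out, the failing lookup raises NameError, not AttributeError. A short illustration (not from the patch):

    # A missing *name* (a builtin compiled out of this interpreter) raises
    # NameError ...
    try:
        unicode
    except NameError:
        print "no unicode builtin (--disable-unicode build)"

    # ... while a missing *attribute* raises AttributeError.
    import types
    try:
        types.NoSuchAttribute
    except AttributeError:
        print "types has no attribute of that name"
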
TESTFN_UNICODE_UNENCODEABLE = None
else:
# Japanese characters (I think - from bug 846133)
- TESTFN_UNICODE_UNENCODEABLE = u"@test-\u5171\u6709\u3055\u308c\u308b"
+ TESTFN_UNICODE_UNENCODEABLE = eval('u"@test-\u5171\u6709\u3055\u308c\u308b"')
try:
# XXX - Note - should be using TESTFN_ENCODING here - but for
# Windows, "mbcs" currently always operates as if in
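
The eval() wrapper in the test_support.py hunk is needed because a u"..." literal is rejected at compile time on --disable-unicode interpreters even if the branch holding it never runs; keeping the literal inside a plain byte string defers its compilation until eval() executes. A sketch of the same trick (illustration only, the names here are made up):

    try:
        unicode
        have_unicode = True
    except NameError:
        have_unicode = False

    if have_unicode:
        # The backslash-u sequences are inert inside a byte string, so this
        # module compiles everywhere; the unicode literal itself is only
        # compiled when eval() runs, i.e. on unicode-enabled builds.
        sample = eval('u"@test-\u5171\u6709"')
    else:
        sample = None
    print repr(sample)
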
Build
-----
+- Bug #1158607: Build with --disable-unicode again.
+
- spwdmodule.c is built only if either HAVE_GETSPNAM or HAVE_GETSPENT is
defined. Discovered as a result of not being able to build on OS X.
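
For reference, one run-time way to tell the two kinds of build apart (an illustration, not part of the patch): sys.maxunicode is only set on unicode-enabled builds.

    import sys

    if hasattr(sys, "maxunicode"):
        print "unicode build, maxunicode =", sys.maxunicode
    else:
        print "built with --disable-unicode"
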
if (!PyArg_ParseTuple(args, "O|ss:encode", &v, &encoding, &errors))
return NULL;
+#ifdef Py_USING_UNICODE
if (encoding == NULL)
encoding = PyUnicode_GetDefaultEncoding();
+#else
+ if (encoding == NULL) {
+ PyErr_SetString(PyExc_ValueError, "no encoding specified");
+ return NULL;
+ }
+#endif
/* Encode via the codec registry */
v = PyCodec_Encode(v, encoding, errors);
if (!PyArg_ParseTuple(args, "O|ss:decode", &v, &encoding, &errors))
return NULL;
+#ifdef Py_USING_UNICODE
if (encoding == NULL)
encoding = PyUnicode_GetDefaultEncoding();
+#else
+ if (encoding == NULL) {
+ PyErr_SetString(PyExc_ValueError, "no encoding specified");
+ return NULL;
+ }
+#endif
/* Decode via the codec registry */
v = PyCodec_Decode(v, encoding, errors);
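
Without Py_USING_UNICODE there is no PyUnicode_GetDefaultEncoding() to fall back on, so the #else branches above turn an omitted encoding argument into an explicit ValueError. Seen from Python, the effect is roughly the following (a sketch, not part of the patch; on unicode builds the call just uses the default encoding):

    s = "hello"
    try:
        s.encode()                  # no encoding argument supplied
    except ValueError, exc:
        # Only reached on a --disable-unicode build, where there is no
        # default encoding to fall back on.
        print "ValueError:", exc
    else:
        # On a unicode build the call simply used the default (ASCII) encoding.
        print "encoded with the default encoding"
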
};
static PyMethodDef PyTclObject_methods[] = {
+#ifdef Py_USING_UNICODE
{"__unicode__", (PyCFunction)PyTclObject_unicode, METH_NOARGS,
PyTclObject_unicode__doc__},
+#endif
{0}
};
}
}
#else
- res = PyString_FromStringAndSize(value->bytes, value->length);
+ result = PyString_FromStringAndSize(value->bytes, value->length);
#endif
return result;
}
))
# Hye-Shik Chang's CJKCodecs modules.
- exts.append(Extension('_multibytecodec',
- ['cjkcodecs/multibytecodec.c']))
- for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
- exts.append(Extension('_codecs_' + loc,
- ['cjkcodecs/_codecs_%s.c' % loc]))
+ if have_unicode:
+ exts.append(Extension('_multibytecodec',
+ ['cjkcodecs/multibytecodec.c']))
+ for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
+ exts.append(Extension('_codecs_' + loc,
+ ['cjkcodecs/_codecs_%s.c' % loc]))
# Dynamic loading module
if sys.maxint == 0x7fffffff:
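
The setup.py hunk skips the CJK codec extensions when the interpreter has no unicode support. The have_unicode flag it tests is defined elsewhere in setup.py and is not shown in this excerpt; one plausible way to compute such a flag (an assumption, not the file's actual definition) is:

    try:
        unicode
        have_unicode = 1
    except NameError:
        have_unicode = 0
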