From: Andy Heninger
Date: Fri, 24 Feb 2017 00:17:13 +0000 (+0000)
Subject: ICU-12645 char16_t clean-ups to umachine.h
X-Git-Tag: release-59-rc~140
X-Git-Url: https://granicus.if.org/sourcecode?a=commitdiff_plain;h=8c61bdb5456af24a831283d6aa20def08c1dd01f;p=icu

ICU-12645 char16_t clean-ups to umachine.h

X-SVN-Rev: 39700
---

diff --git a/icu4c/source/common/unicode/platform.h b/icu4c/source/common/unicode/platform.h
index afaeee6ab18..d2be366e919 100644
--- a/icu4c/source/common/unicode/platform.h
+++ b/icu4c/source/common/unicode/platform.h
@@ -764,6 +764,7 @@
      * gcc 4.4 defines the __CHAR16_TYPE__ macro to a usable type but
      * does not support u"abc" string literals.
      * C++11 and C11 require support for UTF-16 literals
+     * TODO: Fix for plain C. Doesn't work on Mac.
      */
 #   if U_CPLUSPLUS_VERSION >= 11 || (defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L)
 #       define U_HAVE_CHAR16_T 1
diff --git a/icu4c/source/common/unicode/umachine.h b/icu4c/source/common/unicode/umachine.h
index bcd07a6e23a..3dab79a825f 100644
--- a/icu4c/source/common/unicode/umachine.h
+++ b/icu4c/source/common/unicode/umachine.h
@@ -51,29 +51,6 @@
  */
 #include <stddef.h>
 
-/*
- * U_USE_CHAR16_T
- * When set, force use of char16_t for UChar.
- * Note: char16_t is expected to become the default and required in the future,
- * and this option will be removed.
- *
- * Note: for plain C, #include <uchar.h> should define char16_t,
- * but Macintosh Xcode does not yet implement it.
- * @internal
- */
-#ifndef U_USE_CHAR16_T
-#define U_USE_CHAR16_T 1
-#endif
-
-#if U_USE_CHAR16_T
-#ifdef __cplusplus
-#ifdef UCHAR_TYPE
-#undef UCHAR_TYPE
-#endif
-#define UCHAR_TYPE char16_t
-#endif /* __cpluplus */
-#endif /* U_USE_CHAR16_t */
-
 /*==========================================================================*/
 /* For C wrappers, we use the symbol U_STABLE.                              */
 /* This works properly if the includer is C or C++.                         */
@@ -315,7 +292,10 @@ typedef int8_t UBool;
 
 /**
  * \var UChar
- * Define UChar to be UCHAR_TYPE, if that is #defined (for example, to char16_t),
+ *
+ * For C++, UChar is always defined to be char16_t.
+ *
+ * For plain C, define UChar to be UCHAR_TYPE, if that is #defined (for example, to char16_t),
  * or wchar_t if that is 16 bits wide; always assumed to be unsigned.
  * If neither is available, then define UChar to be uint16_t.
  *
@@ -325,12 +305,14 @@
  *
  * @stable ICU 4.4
  */
-#if defined(UCHAR_TYPE)
+#ifdef __cplusplus
+    typedef char16_t UChar;
+#elif defined(UCHAR_TYPE)
     typedef UCHAR_TYPE UChar;
-/* Not #elif U_HAVE_CHAR16_T -- because that is type-incompatible with pre-C++11 callers
-    typedef char16_t UChar; */
 #elif U_SIZEOF_WCHAR_T==2
     typedef wchar_t UChar;
+#elif U_HAVE_CHAR16_T
+    typedef char16_t UChar;
 #elif defined(__CHAR16_TYPE__)
     typedef __CHAR16_TYPE__ UChar;
 #else
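
After this change, a C++ compilation always gets UChar defined as char16_t, while plain C keeps the macro-driven fallback chain (UCHAR_TYPE, then 16-bit wchar_t, then char16_t where U_HAVE_CHAR16_T is set, then __CHAR16_TYPE__, then uint16_t). A minimal client-side sketch of what the new C++ guarantee means, assuming a C++11 compiler with the ICU headers on the include path; the file name check_uchar.cpp is hypothetical and the program is not part of this commit:

    // check_uchar.cpp: hypothetical check, not part of this commit.
    // After ICU-12645, C++ builds define UChar as char16_t unconditionally,
    // so u"..." literals are directly compatible with UChar arrays.
    #include <type_traits>
    #include "unicode/umachine.h"

    static_assert(std::is_same<UChar, char16_t>::value,
                  "C++ builds are expected to define UChar as char16_t");

    int main() {
        const UChar greeting[] = u"hi";  // u"..." yields char16_t code units
        return (greeting[0] == u'h') ? 0 : 1;
    }

Note the ordering on the plain C side: the commit moves the char16_t branch from a commented-out position ahead of the wchar_t check to an active branch behind it, so existing C callers that relied on UCHAR_TYPE or a 16-bit wchar_t keep the same UChar type they had before.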