#endif /* !defined(wxUSE_WCHAR_T) */
-/* Unicode support requires wchar_t */
-#if wxUSE_UNICODE && !wxUSE_WCHAR_T
-    #error "wchar_t must be available in Unicode build"
-#endif /* Unicode */
+/* wchar_t is now required in all builds, not only the Unicode ones */
+#if !wxUSE_WCHAR_T
+    #error "wchar_t must be available"
+#endif /* wxUSE_WCHAR_T */
#ifdef HAVE_WIDEC_H
#include <widec.h>
#endif
-
- #if !defined(__GNUC__) || defined(__DARWIN__)
- #define wxWINT_T_IS_TYPEDEF
- #endif
#endif /* wxUSE_WCHAR_T */
/* -------------------------------------------------------------------------- */
typedef char wxChar;
typedef signed char wxSChar;
typedef unsigned char wxUChar;
-#else /* Unicode */
+#else
/* VZ: note that VC++ defines _T[SU]CHAR simply as wchar_t and not as */
/* signed/unsigned version of it which (a) makes sense to me (unlike */
/* char wchar_t is always unsigned) and (b) was how the previous */
/* definitions worked so keep it like this */

/* Sun's SunPro compiler supports the wchar_t type and wide character */
/* functions, but does not define __WCHAR_TYPE__. Define it here to */
/* allow unicode enabled builds. */
- #if defined(__SUNPRO_CC) || defined(__SUNPRO_C)
- #define __WCHAR_TYPE__ wxchar_t
+ #if (defined(__SUNPRO_CC) || defined(__SUNPRO_C)) && !defined(__WCHAR_TYPE__)
+ #define __WCHAR_TYPE__ wxchar_t
#endif
/* GNU libc has __WCHAR_TYPE__ which requires special treatment, see */
/* comment below */
#endif /* __WCHAR_TYPE__ */
#endif /* ASCII/Unicode */
+/* ------------------------------------------------------------------------- */
+/* define wxStringCharType */
+/* ------------------------------------------------------------------------- */
+
+/* depending on the platform, a Unicode build can store wxStrings either as
+   wchar_t* or as UTF-8 encoded char*: */
+#if wxUSE_UNICODE
+    /* FIXME-UTF8: what would be a better place for this? */
+ #if defined(wxUSE_UTF8_LOCALE_ONLY) && !defined(wxUSE_UNICODE_UTF8)
+ #error "wxUSE_UTF8_LOCALE_ONLY only makes sense with wxUSE_UNICODE_UTF8"
+ #endif
+ #ifndef wxUSE_UTF8_LOCALE_ONLY
+ #define wxUSE_UTF8_LOCALE_ONLY 0
+ #endif
+
+ #ifndef wxUSE_UNICODE_UTF8
+ #define wxUSE_UNICODE_UTF8 0
+ #endif
+
+ #if wxUSE_UNICODE_UTF8
+ #define wxUSE_UNICODE_WCHAR 0
+ #else
+ #define wxUSE_UNICODE_WCHAR 1
+ #endif
+#else
+ #define wxUSE_UNICODE_WCHAR 0
+ #define wxUSE_UNICODE_UTF8 0
+ #define wxUSE_UTF8_LOCALE_ONLY 0
+#endif
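+
+/*
+    Illustrative sketch only, not part of the real header: wxUSE_UNICODE_WCHAR
+    and wxUSE_UNICODE_UTF8 are never both non-zero, so code that has to know
+    the internal representation can simply dispatch on them:
+
+        #if wxUSE_UNICODE_WCHAR
+            ... wchar_t-specific code (e.g. Unicode build under Windows) ...
+        #elif wxUSE_UNICODE_UTF8
+            ... UTF-8 char-specific code ...
+        #else
+            ... (deprecated) ANSI build code ...
+        #endif
+
+    The "..." bodies above are placeholders, not wxWidgets code.
+ */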
+
+/* define char type used by wxString internal representation: */
+#if wxUSE_UNICODE_WCHAR
+ typedef wchar_t wxStringCharType;
+#else /* wxUSE_UNICODE_UTF8 || ANSI */
+ typedef char wxStringCharType;
+#endif
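+
+/*
+    Illustrative sketch only, not part of the real header: declaring storage
+    with wxStringCharType keeps it in step with wxString's internal
+    representation in wchar_t-based, UTF-8-based and (deprecated) ANSI builds
+    alike, e.g.
+
+        wxStringCharType buffer[64];
+        const wxStringCharType *p = buffer;
+
+    "buffer" and "p" are hypothetical names used only in this example.
+ */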
+
+
/* ------------------------------------------------------------------------- */
/* define _T() and related macros */
/* ------------------------------------------------------------------------- */
#endif /* ASCII/Unicode */
#endif /* !defined(_T) */
+/*
+    The wxS ("wx string") macro can be used to create literals using the same
+    representation as wxString uses internally, i.e. wchar_t in wchar_t-based
+    Unicode builds (e.g. the Unicode build under Windows) and char in
+    UTF-8-based Unicode builds and in (deprecated) ANSI builds (see the
+    wxStringCharType definition above).
+ */
+#if wxUSE_UNICODE_WCHAR
+ #define wxS(x) wxCONCAT_HELPER(L, x)
+#else /* wxUSE_UNICODE_UTF8 || ANSI */
+ #define wxS(x) x
+#endif
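+
+/*
+    Illustrative usage only, not part of the real header: a literal wrapped in
+    wxS() always has the character type of wxStringCharType, so it can be used
+    without any conversion in every build:
+
+        const wxStringCharType *msg = wxS("not found");
+
+    "msg" and the literal text are hypothetical, chosen only for this example.
+ */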
+
/* although global macros with such names are normally bad, we want to have */
/* another name for _T() which should be used to avoid confusion between */
/* _T() and _() in wxWidgets sources */