Use 16-bit Unicode on the Windows platform.

Because Windows uses UTF-16 internally, using a 16-bit Unicode character
type removes the need to convert ecl_character to wchar_t in an embedding
environment.
This commit is contained in:
Yuguo Zhang 2017-08-08 14:10:58 +08:00
parent 4a3418502c
commit 74b673479e
2 changed files with 5 additions and 6 deletions

View file

@ -20,10 +20,9 @@ THREADS_OBJ=
!endif
!if "$(ECL_UNICODE)" != ""
ECL_UNICODE_FLAG=21
ECL_UCD_OBJ = ucd.obj ucd-0000.obj ucd-0016.obj \
ucd-0032.obj ucd-0048.obj ucd-0064.obj \
ucd-0080.obj ucd-0096.obj
ECL_UNICODE_FLAG=16
ECL_UCD_OBJ = ucd16.obj ucd16-0000.obj ucd16-0016.obj \
ucd16-0032.obj ucd16-0048.obj ucd16-0064.obj
!else
ECL_UNICODE_FLAG=0
!endif

View file

@ -155,11 +155,11 @@ typedef unsigned int cl_hashkey;
* The character type
*/
#ifdef ECL_UNICODE
#define ECL_CHAR_CODE_LIMIT 1114112 /* unicode character code limit */
#define ECL_CHAR_CODE_LIMIT 65536 /* unicode character code limit */
#else
#define ECL_CHAR_CODE_LIMIT 256 /* unicode character code limit */
#endif
typedef int ecl_character;
typedef short ecl_character;
typedef unsigned char ecl_base_char;
/*