Fix compile warning when appending UTF-8/ASCII strings to ustring16

String constants are (const char *), but UTF-8 strings must be treated as
an array of bytes when fiddling with the bits.

The following comparison fails without this change: uchar8_t is a
signed char whose range (-128..127) can never equal 0xC0 (192), so the
test is always false and the compiler warns about it:

   const uchar8_t* c2 = other;
   ...
   else if (c2[l] == 0xC0 || c2[l] == 0xC1)
   ...
This commit is contained in:
Ben Gardner 2023-04-15 02:43:40 -05:00 committed by GitHub
parent 896c825b9e
commit c4ab49201b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@@ -1323,7 +1323,7 @@ public:
// Determine if the string is long enough for a BOM. // Determine if the string is long enough for a BOM.
u32 len = 0; u32 len = 0;
const uchar8_t* p = other; const u8* p = reinterpret_cast<const u8*>(other);
do do
{ {
++len; ++len;
@@ -1338,10 +1338,10 @@ public:
} }
// If a BOM was found, don't include it in the string. // If a BOM was found, don't include it in the string.
const uchar8_t* c2 = other; const u8* c2 = reinterpret_cast<const u8*>(other);
if (c_bom != unicode::EUTFE_NONE) if (c_bom != unicode::EUTFE_NONE)
{ {
c2 = other + unicode::BOM_UTF8_LEN; c2 += unicode::BOM_UTF8_LEN;
length -= unicode::BOM_UTF8_LEN; length -= unicode::BOM_UTF8_LEN;
} }