Why does the Visual C++ 2017 implementation of std::codecvt_utf8_utf16<>::do_max_length() return 6 (assuming codecvt_mode is 0)? Here is the relevant part of the implementation:
// CLASS codecvt_utf8_utf16
template<class _Elem,
    unsigned long _Mymax = 0x10ffff,
    codecvt_mode _Mymode = (codecvt_mode)0>
class codecvt_utf8_utf16
    : public codecvt<_Elem, char, _Statype>
{   // facet for converting between UTF-16 _Elem and UTF-8 byte sequences
    // ...

    virtual int __CLR_OR_THIS_CALL do_max_length() const _THROW0()
    {   // return maximum length required for a conversion
        return ((_Mymode & consume_header) != 0 ? 9   // header + max input
            : (_Mymode & generate_header) != 0 ? 7    // header + max output
            : 6);                                     // 6-byte max input sequence, no 3-byte header
    }
    // ...
};
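For what it's worth, the value is easy to observe directly. The following minimal check (it default-constructs the facet, so _Mymode stays 0, and calls the public max_length(), which forwards to do_max_length(); I'm assuming the facet can be constructed directly, which works here because it has a public constructor and destructor) prints 6 when built with Visual C++ 2017:

#include <codecvt>
#include <iostream>

int main()
{
    // Default codecvt_mode is 0, so neither consume_header nor
    // generate_header is set and the last branch above is taken.
    std::codecvt_utf8_utf16<char16_t> cvt;
    std::cout << cvt.max_length() << '\n';  // prints 6 with Visual C++ 2017
}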
Shouldn't it be 4 instead, since at most 4 bytes are required to encode any Unicode code point in UTF-8?
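For comparison, even the largest code point, U+10FFFF, comes out as only 4 UTF-8 bytes when pushed through this very facet. A quick sketch using the std::wstring_convert helper (the literal u"\U0010FFFF" is the surrogate pair 0xDBFF 0xDFFF on the UTF-16 side):

#include <codecvt>
#include <iostream>
#include <locale>
#include <string>

int main()
{
    // U+10FFFF: surrogate pair 0xDBFF 0xDFFF in UTF-16,
    // 4 bytes (F4 8F BF BF) in UTF-8.
    std::wstring_convert<std::codecvt_utf8_utf16<char16_t>, char16_t> conv;
    const std::string utf8 = conv.to_bytes(u"\U0010FFFF");
    std::cout << utf8.size() << '\n';  // prints 4
}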