Searched refs:utf_16_decode (Results 1 – 5 of 5) sorted by relevance
/external/python/cpython2/Lib/encodings/ |
D | utf_16.py | 16 return codecs.utf_16_decode(input, errors, True)
|
/external/python/cpython3/Lib/encodings/ |
D | utf_16.py | 16 return codecs.utf_16_decode(input, errors, True)
|
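Both hits above are the same line of the stdlib UTF-16 codec module: its module-level decode() wraps the C-level codecs.utf_16_decode. A minimal sketch of that context (Python 3 syntax; only the matched return line comes from the hits, the surrounding definition follows the usual layout of Lib/encodings/ modules):

import codecs

def decode(input, errors='strict'):
    # final=True: treat a trailing incomplete UTF-16 code unit as an error
    # instead of leaving it for a later call; the call returns a
    # (decoded_str, bytes_consumed) tuple.
    return codecs.utf_16_decode(input, errors, True)
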
/external/python/cpython2/Modules/ |
D | _codecsmodule.c | 279  utf_16_decode(PyObject *self,  in utf_16_decode() function
  |                 | 1074 {"utf_16_decode", utf_16_decode, METH_VARARGS},
|
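The two C hits are the decoder's definition (line 279) and its entry in the module's method table (line 1074). Because Lib/codecs.py re-exports everything from _codecs, the codecs.utf_16_decode seen in the Python hits is this same built-in; a small check, assuming a standard CPython build:

import _codecs
import codecs

# codecs re-exports the built-in registered in Modules/_codecsmodule.c.
assert codecs.utf_16_decode is _codecs.utf_16_decode
print(codecs.utf_16_decode)  # e.g. <built-in function utf_16_decode>
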
/external/python/cpython2/Lib/test/ |
D | test_codecs.py | 537 codecs.utf_16_decode('\x01', 'replace', True))
  |                | 539 codecs.utf_16_decode('\x01', 'ignore', True))
  |                | 542 self.assertRaises(UnicodeDecodeError, codecs.utf_16_decode, "\xff", "strict", True)
|
/external/python/cpython3/Lib/test/ |
D | test_codecs.py | 650 codecs.utf_16_decode(b'\x01', 'replace', True))
  |                | 652 codecs.utf_16_decode(b'\x01', 'ignore', True))
  |                | 655 self.assertRaises(UnicodeDecodeError, codecs.utf_16_decode,
|
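The test hits exercise the three error handlers on truncated input. A runnable sketch mirroring the cpython3 lines (Python 3; the commented results are what these calls produce, not literal lines from test_codecs.py):

import codecs

# A single odd byte cannot form a complete UTF-16 code unit.
print(codecs.utf_16_decode(b'\x01', 'replace', True))  # ('\ufffd', 1)
print(codecs.utf_16_decode(b'\x01', 'ignore', True))   # ('', 1)

# 'strict' raises UnicodeDecodeError for the truncated input.
try:
    codecs.utf_16_decode(b'\xff', 'strict', True)
except UnicodeDecodeError as exc:
    print('strict raised:', exc)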