Searched defs:badchar (Results 1 – 4 of 4) sorted by relevance
1648 int badchar = (int)PyUnicode_AS_UNICODE(uself->object)[uself->start]; in UnicodeEncodeError_str() local
1833 int badchar = (int)PyUnicode_AS_UNICODE(uself->object)[uself->start]; in UnicodeTranslateError_str() local
510 int badchar = 0; in decoding_fgets() local
589 int badchar = 0; in decoding_fgets() local
1914 Py_UCS4 badchar = PyUnicode_ReadChar(uself->object, uself->start); in UnicodeEncodeError_str() local
2129 Py_UCS4 badchar = PyUnicode_ReadChar(uself->object, uself->start); in UnicodeTranslateError_str() local
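
The two groups of exceptions.c hits show the same operation written against two generations of the CPython C API: the older, now-deprecated PyUnicode_AS_UNICODE macro indexes a Py_UNICODE buffer directly, while the PEP 393-era PyUnicode_ReadChar() returns a Py_UCS4 code point. Below is a minimal standalone sketch, not taken from the indexed files (the program and sample string are invented for illustration), that reads a single code point with the newer call:

/* Sketch: read one code point from a Python str object with
 * PyUnicode_ReadChar(), the API used in the 1914/2129 hits above.
 * The legacy PyUnicode_AS_UNICODE path (1648/1833 hits) is deprecated
 * and was removed in Python 3.12, so it is not exercised here. */
#include <Python.h>
#include <stdio.h>

int
main(void)
{
    Py_Initialize();

    /* "héllo" passed as UTF-8; PyUnicode_FromString decodes it. */
    PyObject *s = PyUnicode_FromString("h\xc3\xa9llo");
    if (s == NULL) {
        PyErr_Print();
        Py_Finalize();
        return 1;
    }

    /* Read the code point at index 1 ('é'). On error the call
     * returns (Py_UCS4)-1 and sets a Python exception. */
    Py_UCS4 badchar = PyUnicode_ReadChar(s, 1);
    if (badchar == (Py_UCS4)-1 && PyErr_Occurred()) {
        PyErr_Print();
    }
    else {
        printf("code point at index 1: U+%04X\n", (unsigned int)badchar);
    }

    Py_DECREF(s);
    Py_Finalize();
    return 0;
}

In the UnicodeEncodeError_str()/UnicodeTranslateError_str() callers, uself->object is the string that failed to encode or translate and uself->start is the offset of the offending character, so the same call pattern extracts the "bad" code point for the error message.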