Lines Matching refs:token

287         for token in self.alias:
288             if token is not None and name == token.name:
342                 for token in token_list:
343                     if token.name == node.name:
467             token = tokens[end]
469             if token.name == '<':
471             elif token.name == '>':
508             token = tokens[i]
509             if token.name == '<':
516             elif token.name == ',':
519             elif token.name == '*':
521             elif token.name == '&':
523             elif token.name == '[':
525             elif token.name == ']':
528                 name_tokens.append(token)
699     def HandleError(self, msg, token):
702                          (msg, self.filename, token, printable_queue))
706             token = self._GetNextToken()
707             if not token:
711             self.current_token = token
714             if token.token_type == _INTERNAL_TOKEN:
715                 if token.name == _NAMESPACE_POP:
720                 result = self._GenerateOne(token)
724                 self.HandleError('exception', token)
738     def _GenerateOne(self, token):
739         if token.token_type == tokenize.NAME:
740             if (keywords.IsKeyword(token.name) and
741                 not keywords.IsBuiltinType(token.name)):
742                 method = getattr(self, 'handle_' + token.name)
744             elif token.name == self.in_class_name_only:
751                     return self._GetMethod([token], FUNCTION_CTOR, None, True)
758             temp_tokens.insert(0, token)
806         elif token.token_type == tokenize.SYNTAX:
807             if token.name == '~' and self.in_class:
809                 token = self._GetNextToken()
812                 if (token.token_type == tokenize.NAME and
813                     token.name == self.in_class_name_only):
814                     return self._GetMethod([token], FUNCTION_DTOR, None, True)
816         elif token.token_type == tokenize.PREPROCESSOR:
819             name = token.name[1:].lstrip()
827                 assert name[0] in '<"', token
828                 assert name[-1] in '>"', token
831                 return Include(token.start, token.end, filename, system)
842                 return Define(token.start, token.end, name, value)
862     def _IgnoreUpTo(self, token_type, token):
863         unused_tokens = self._GetTokensUpTo(token_type, token)
868             token = self._GetNextToken()
869             if token.token_type != tokenize.PREPROCESSOR:
872             name = token.name[1:].lstrip()
886         token = GetNextToken()
888             if token.token_type == tokenize.SYNTAX:
889                 if token.name == open_paren:
891                 elif token.name == close_paren:
895             yield token
896             token = GetNextToken()
897         yield token
910     def _AddBackToken(self, token):
911         if token.whence == tokenize.WHENCE_STREAM:
912             token.whence = tokenize.WHENCE_QUEUE
913             self.token_queue.insert(0, token)
915             assert token.whence == tokenize.WHENCE_QUEUE, token
916             self.token_queue.append(token)
921                 for token in tokens:
922                     token.whence = tokenize.WHENCE_QUEUE
963             token = self._GetNextToken()
964             assert token.token_type == tokenize.SYNTAX, token
965             if token.name == '<':
967                 template_portion = [token]
969                 token = self._GetNextToken()
970             assert token.token_type == tokenize.SYNTAX, token
971             assert token.name == '(', token
1007         token = self._GetNextToken()
1008         assert token.name == '(', token
1012         token = self._GetNextToken()
1013         while token.token_type == tokenize.NAME:
1014             modifier_token = token
1015             token = self._GetNextToken()
1021                 assert token.name == '(', token
1024                 token = self._GetNextToken()
1027                 assert token.name == '(', token
1030                 token = self._GetNextToken()
1040         assert token.token_type == tokenize.SYNTAX, token
1042         if token.name == ':':
1044             while token.name != ';' and token.name != '{':
1045                 token = self._GetNextToken()
1049         if token.name == '(':
1059                 token = self._GetNextToken()
1060                 assert token.token_type == tokenize.SYNTAX, token
1061                 assert token.name == ';', token
1078         if token.name == '{':
1083         if token.name == '=':
1084             token = self._GetNextToken()
1086             if token.name == 'default' or token.name == 'delete':
1089                 token = self._GetNextToken()
1092                 assert token.token_type == tokenize.CONSTANT, token
1093                 assert token.name == '0', token
1095                 token = self._GetNextToken()
1097         if token.name == '[':
1101             token = self._GetNextToken()
1103         assert token.name == ';', (token, return_type_and_name, parameters)
1195         name_tokens, token = self.GetName()
1200         if token.token_type == tokenize.SYNTAX and token.name == ';':
1201             return ctor(token.start, token.end, name, None,
1204         if token.token_type == tokenize.NAME and self._handling_typedef:
1205             self._AddBackToken(token)
1206             return ctor(token.start, token.end, name, None,
1212         if token.token_type == tokenize.SYNTAX and token.name == '{':
1214             new_type = ctor(token.start, token.end, name, fields,
1221             token = next
1224         assert token.token_type == tokenize.NAME, token
1225         return self._CreateVariable(token, token.name, name, [], '', None)
1267         token = self._GetNextToken()
1268         if not (token.token_type == tokenize.NAME and token.name == 'class'):
1269             self._AddBackToken(token)
1294         token = token2 = self._GetNextToken()
1295         if token.name == 'inline':
1300         assert token.token_type == tokenize.NAME or token.name == '::', token
1302         return_type_and_name.insert(0, token)
1303         if token2 is not token:
1353         token = self._GetNextToken()
1354         if (token.token_type == tokenize.NAME and
1355             keywords.IsKeyword(token.name)):
1357             method = getattr(self, 'handle_' + token.name)
1362             tokens = [token]
1374             indices = token
1426         token = self._GetNextToken()
1427         assert token.token_type == tokenize.SYNTAX, token
1428         assert token.name == '<', token
1431         token = self._GetNextToken()
1432         if token.token_type == tokenize.NAME:
1433             if token.name == 'class':
1435             elif token.name == 'struct':
1437             elif token.name == 'friend':
1439         self._AddBackToken(token)
1464             token = self._GetNextToken()
1465             assert token.token_type == tokenize.NAME, token
1467             if token.name not in ('public', 'protected', 'private'):
1471                 self._AddBackToken(token)
1474                 token = self._GetNextToken()
1475                 if token.name != 'virtual':
1476                     self._AddBackToken(token)
1486                 token = next_token
1490         return bases, token
1497             token = class_token
1506             name_tokens, token = self.GetName()
1509         if token.token_type == tokenize.SYNTAX:
1510             if token.name == ';':
1515             if token.name in '*&':
1524                                                 modifiers, token.name, None)
1527                 tokens = (class_token, token, name_token, next_token)
1530             if token.name == ':':
1531                 bases, token = self._GetBases()
1534         if token.token_type == tokenize.SYNTAX and token.name == '{':
1535             assert token.token_type == tokenize.SYNTAX, token
1536             assert token.name == '{', token
1543                 token = self._GetNextToken()
1544                 if token.token_type != tokenize.NAME:
1545                     assert token.token_type == tokenize.SYNTAX, token
1546                     assert token.name == ';', token
1554                                                 token.name, new_class,
1555                                                 modifiers, token.name, None)
1558                 self.HandleError('non-typedef token', token)
1559             self._AddBackToken(token)
1565         token = self._GetNextToken()
1568         if token.token_type == tokenize.NAME:
1569             name = token.name
1570             token = self._GetNextToken()
1572         assert token.token_type == tokenize.SYNTAX, token
1576         internal_token.whence = token.whence
1577         if token.name == '=':
1583             assert token.name == '{', token
1620         token = self._GetNextToken()
1621         assert token.token_type == tokenize.SYNTAX
1622         assert token.name == ':'