Lines Matching refs:tokenize
167 import tokenize
335 if ttype == tokenize.STRING and is_literal_string(tstring):
338 elif ttype not in (tokenize.COMMENT, tokenize.NL):
342 if ttype == tokenize.NAME and tstring in ('class', 'def'):
345 if ttype == tokenize.NAME and tstring in opts.keywords:
348 if ttype == tokenize.STRING:
402 if ttype == tokenize.OP:
413 if ttype == tokenize.STRING and is_literal_string(tstring):
416 elif ttype not in (tokenize.NEWLINE, tokenize.INDENT,
417 tokenize.COMMENT):
422 if ttype == tokenize.OP and tstring == '(':
430 if ttype == tokenize.OP and tstring == ')':
438 elif ttype == tokenize.STRING and is_literal_string(tstring):
440 elif ttype not in [tokenize.COMMENT, token.INDENT, token.DEDENT,
441 token.NEWLINE, tokenize.NL]:
651 tokens = tokenize.tokenize(fp.readline)
654 except tokenize.TokenError as e:
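Taken together, the matches above reflect a single pattern: the file is read through `tokenize.tokenize(fp.readline)` (line 651), each token's type/string pair is dispatched on (`tokenize.STRING`, `tokenize.NAME`, `tokenize.OP`, layout tokens like `NL`/`INDENT`/`DEDENT`), and malformed input surfaces as `tokenize.TokenError` (line 654). Below is a minimal, self-contained sketch of that loop; the function name `extract_strings` and the choice to collect literal strings are illustrative assumptions, not the original program's logic.

import token
import tokenize

def extract_strings(path):
    """Hedged sketch of the token-dispatch loop the matches above reflect.

    tokenize.tokenize() needs a *binary* readline, and yields TokenInfo
    tuples whose .type/.string fields correspond to the ttype/tstring
    names seen in the matched lines.
    """
    found = []
    with open(path, 'rb') as fp:
        try:
            for tok in tokenize.tokenize(fp.readline):
                ttype, tstring = tok.type, tok.string
                # Skip comments and pure layout tokens, as the checks above do.
                if ttype in (tokenize.COMMENT, tokenize.NL,
                             token.INDENT, token.DEDENT, token.NEWLINE):
                    continue
                if ttype == tokenize.STRING:
                    # tok.start is a (row, col) pair; keep the row for reporting.
                    found.append((tok.start[0], tstring))
        except tokenize.TokenError as e:
            # TokenError carries a message and, typically, a (row, col) position.
            print(f'tokenize error in {path}: {e.args[0]}')
    return found

Note that the matched source mixes `tokenize.*` and `token.*` constants (lines 440-441); both work, since `tokenize` re-exports the `token` module's constants and adds `COMMENT`, `NL`, and `ENCODING`.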