
Searched refs:_tokenize (Results 1 – 8 of 8) sorted by relevance

/external/python/cpython3/Lib/
tokenize.py
    489  return _tokenize(chain(consumed, rl_gen, empty).__next__, encoding)
    492  def _tokenize(readline, encoding):    [function definition]
    730  return _tokenize(readline, None)
    766  tokens = _tokenize(sys.stdin.readline, None)
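Note: in this CPython 3 tree, the public tokenize() wrapper detects the
source encoding and delegates to the private _tokenize generator defined
at line 492. A minimal sketch of the public entry point (the private
generator itself is an implementation detail):

    import io
    import tokenize

    # tokenize() wants a readline callable yielding bytes; it reads the
    # encoding cookie itself and hands off to _tokenize internally.
    source = b"x = 1\n"
    for tok in tokenize.tokenize(io.BytesIO(source).readline):
        print(tok)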
gettext.py
    84   def _tokenize(plural):    [function definition]
    177  result, nexttok = _parse(_tokenize(plural))
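Note: _tokenize(plural) here lexes a C-style plural-forms expression; the
public wrapper is gettext.c2py(), which runs _parse(_tokenize(plural)) as
seen at line 177 and compiles the expression into a callable:

    import gettext

    # c2py() turns a plural-forms expression into a function of n.
    plural = gettext.c2py("n != 1")
    print(plural(1))  # 0 -> singular form
    print(plural(5))  # 1 -> plural form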
/external/python/cpython2/Lib/idlelib/
EditorWindow.py
    1588  _tokenize = tokenize    [variable]
    1614  INDENT=_tokenize.INDENT,
    1615  NAME=_tokenize.NAME,
    1626  save_tabsize = _tokenize.tabsize
    1627  _tokenize.tabsize = self.tabwidth
    1630  _tokenize.tokenize(self.readline, self.tokeneater)
    1631  except (_tokenize.TokenError, SyntaxError):
    1636  _tokenize.tabsize = save_tabsize
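Note: here _tokenize is only a local alias for the tokenize module, not a
separate symbol. EditorWindow patches the module-level tabsize around a
call to the old Python 2 callback-style API,
tokenize.tokenize(readline, tokeneater). A Python 2 sketch of that
pattern (the tab width of 4 is an arbitrary stand-in for self.tabwidth):

    from __future__ import print_function
    import token
    import tokenize as _tokenize
    from StringIO import StringIO

    def tokeneater(ttype, string, start, end, line):
        # Callback invoked once per token by the Python 2 API.
        print(token.tok_name[ttype], repr(string))

    save_tabsize = _tokenize.tabsize
    _tokenize.tabsize = 4  # stand-in for self.tabwidth
    try:
        _tokenize.tokenize(StringIO("if x:\n\ty = 1\n").readline, tokeneater)
    except (_tokenize.TokenError, SyntaxError):
        pass
    finally:
        _tokenize.tabsize = save_tabsize  # always restore, as at line 1636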
/external/python/cpython2/Lib/
gettext.py
    84   def _tokenize(plural):    [function definition]
    177  result, nexttok = _parse(_tokenize(plural))
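Note: this is the same plural-forms tokenizer as in the CPython 3 tree
above (identical hits at lines 84 and 177); see the c2py() sketch there.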
/external/python/cpython3/Lib/test/
test_tokenize.py
    2  from tokenize import (tokenize, _tokenize, untokenize, NUMBER, NAME, OP,
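Note: the test suite imports the private _tokenize generator directly. In
this tree generate_tokens(readline) is simply _tokenize(readline, None)
(tokenize.py line 730), so a str-based readline with encoding=None works;
being private, the signature is not stable across CPython releases:

    import io
    from tokenize import _tokenize

    # Direct call mirroring the test import; encoding=None means the
    # readline callable yields str rather than bytes.
    for tok in _tokenize(io.StringIO("a + b\n").readline, None):
        print(tok)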
/external/python/cpython3/Doc/library/
tokenize.rst
    154  .. _tokenize-cli:
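Note: this hit is a reStructuredText hyperlink target (the label for the
command-line usage section of the tokenize docs), not a Python symbol; it
matches the search only textually.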
/external/libmojo/third_party/jinja2/
environment.py
    486  def _tokenize(self, source, name, filename=None, state=None):    [member of class Environment]
parser.py
    32  self.stream = environment._tokenize(source, name, filename, state)
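Note: Jinja2's Parser obtains its TokenStream from the private
Environment._tokenize, which preprocesses the source and applies any
extension stream filters before parsing. For inspecting raw lexer output,
the public Environment.lex() yields (lineno, token_type, value) tuples:

    from jinja2 import Environment

    # lex() exposes the lexer output that _tokenize wraps into a
    # TokenStream for the Parser.
    env = Environment()
    for lineno, token_type, value in env.lex("Hello {{ name }}!"):
        print(lineno, token_type, repr(value))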