
Searched refs:tokenize (Results 1 – 25 of 274) sorted by relevance


/external/python/cpython3/Parser/pgen/
metaparser.py
4 import tokenize # from stdlib
13 tokenize.NAME: "NAME",
14 tokenize.STRING: "STRING",
15 tokenize.NEWLINE: "NEWLINE",
16 tokenize.NL: "NL",
17 tokenize.OP: "OP",
18 tokenize.ENDMARKER: "ENDMARKER",
19 tokenize.COMMENT: "COMMENT",
25 self.generator = tokenize.generate_tokens(grammar_adaptor.readline)
32 while self.type != tokenize.ENDMARKER:
[all …]
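A minimal sketch of the pattern visible above, assuming only the stdlib tokenize module (not pgen's own code): generate_tokens() is driven by a readline callable and the loop stops at ENDMARKER.

    import io
    import tokenize

    # Feed generate_tokens() a readline callable and stop at ENDMARKER,
    # mirroring the metaparser loop in the hits above.
    source = "start: NAME NEWLINE\n"
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        print(tokenize.tok_name[tok.type], repr(tok.string))
        if tok.type == tokenize.ENDMARKER:
            break
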
/external/minijail/
util_unittest.cc
40 TEST(tokenize, null_stringp) { in TEST() argument
41 ASSERT_EQ(nullptr, tokenize(nullptr, nullptr)); in TEST()
42 ASSERT_EQ(nullptr, tokenize(nullptr, "")); in TEST()
43 ASSERT_EQ(nullptr, tokenize(nullptr, ",")); in TEST()
46 ASSERT_EQ(nullptr, tokenize(&p, nullptr)); in TEST()
50 TEST(tokenize, null_delim) { in TEST() argument
53 ASSERT_EQ(str, tokenize(&p, nullptr)); in TEST()
58 ASSERT_EQ(str, tokenize(&p, "")); in TEST()
64 TEST(tokenize, basic) { in TEST() argument
67 ASSERT_EQ("a", std::string(tokenize(&p, ","))); in TEST()
[all …]
/external/python/cpython3/Tools/peg_generator/pegen/
tokenizer.py
2 import tokenize
10 def shorttok(tok: tokenize.TokenInfo) -> str:
20 _tokens: List[tokenize.TokenInfo]
22 def __init__(self, tokengen: Iterator[tokenize.TokenInfo], *, verbose: bool = False):
30 def getnext(self) -> tokenize.TokenInfo:
35 if tok.type in (tokenize.NL, tokenize.COMMENT):
47 def peek(self) -> tokenize.TokenInfo:
51 if tok.type in (tokenize.NL, tokenize.COMMENT):
58 def diagnose(self) -> tokenize.TokenInfo:
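A rough sketch of the skipping behavior shown in these hits (a hypothetical helper, not pegen's Tokenizer class): NL and COMMENT tokens are filtered out before callers see the stream.

    import io
    import tokenize

    def significant_tokens(code):
        # Drop NL and COMMENT tokens, as the pegen wrapper does in getnext()/peek().
        for tok in tokenize.generate_tokens(io.StringIO(code).readline):
            if tok.type in (tokenize.NL, tokenize.COMMENT):
                continue
            yield tok

    for tok in significant_tokens("x = 1  # comment\n"):
        print(tokenize.tok_name[tok.type], repr(tok.string))
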
/external/googletest/googlemock/scripts/generator/cpp/
ast.py
44 from cpp import tokenize
549 if parts[-1].token_type == tokenize.NAME:
579 if (type_name and type_name[-1].token_type == tokenize.NAME and
580 p.token_type == tokenize.NAME):
581 type_name.append(tokenize.Token(tokenize.SYNTAX, ' ', 0, 0))
749 if token.token_type == tokenize.NAME:
768 if next.token_type == tokenize.SYNTAX and next.name == '(':
773 syntax = tokenize.SYNTAX
783 new_temp = self._GetTokensUpTo(tokenize.SYNTAX, ';')
786 last_token = tokenize.Token(tokenize.SYNTAX, ';', 0, 0)
[all …]
/external/python/cpython3/Doc/library/
tokenize.rst
1 :mod:`tokenize` --- Tokenizer for Python source
4 .. module:: tokenize
10 **Source code:** :source:`Lib/tokenize.py`
14 The :mod:`tokenize` module provides a lexical scanner for Python source code,
23 :term:`named tuple` returned from :func:`tokenize.tokenize`.
30 .. function:: tokenize(readline)
32 The :func:`.tokenize` generator requires one argument, *readline*, which
57 :func:`.tokenize` determines the source encoding of the file by looking for a
64 Like :func:`.tokenize`, the *readline* argument is a callable returning
69 :func:`.tokenize`. It does not yield an :data:`~token.ENCODING` token.
[all …]
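A small usage sketch of the documented API (stdlib only): tokenize.tokenize() takes a readline that returns bytes, detects the source encoding, and yields an ENCODING token first; generate_tokens() is the str-based variant that does not.

    import io
    import tokenize

    data = b"x = 'hello'  # bytes input\n"
    # The bytes-based entry point yields an ENCODING token before anything else.
    for tok in tokenize.tokenize(io.BytesIO(data).readline):
        print(tokenize.tok_name[tok.type], repr(tok.string))
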
/external/python/cpython2/Tools/scripts/
checkappend.py
39 import tokenize
106 tokenize.tokenize(self.file.readline, self.tokeneater)
107 except tokenize.TokenError, msg:
113 NEWLINE=tokenize.NEWLINE,
114 JUNK=(tokenize.COMMENT, tokenize.NL),
115 OP=tokenize.OP,
116 NAME=tokenize.NAME):
cleanfuture.py
42 import tokenize
157 STRING = tokenize.STRING
158 NL = tokenize.NL
159 NEWLINE = tokenize.NEWLINE
160 COMMENT = tokenize.COMMENT
161 NAME = tokenize.NAME
162 OP = tokenize.OP
165 get = tokenize.generate_tokens(self.getline).next
reindent.py
44 import tokenize
206 tokenize.tokenize(self.getline, self.tokeneater)
288 INDENT=tokenize.INDENT,
289 DEDENT=tokenize.DEDENT,
290 NEWLINE=tokenize.NEWLINE,
291 COMMENT=tokenize.COMMENT,
292 NL=tokenize.NL):
/external/python/cpython3/Tools/i18n/
pygettext.py
166 import tokenize
334 if ttype == tokenize.STRING and is_literal_string(tstring):
337 elif ttype not in (tokenize.COMMENT, tokenize.NL):
341 if ttype == tokenize.NAME and tstring in ('class', 'def'):
344 if ttype == tokenize.NAME and tstring in opts.keywords:
349 if ttype == tokenize.OP:
360 if ttype == tokenize.STRING and is_literal_string(tstring):
363 elif ttype not in (tokenize.NEWLINE, tokenize.INDENT,
364 tokenize.COMMENT):
369 if ttype == tokenize.OP and tstring == '(':
[all …]
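An illustrative sketch of the keyword-then-string scan these hits come from (a hypothetical helper, not pygettext itself): look for a NAME in the keyword set, then an opening parenthesis, then a STRING literal.

    import io
    import tokenize

    def find_marked_strings(code, keywords=("_",)):
        # Collect string literals that directly follow a call like _("...").
        toks = list(tokenize.generate_tokens(io.StringIO(code).readline))
        found = []
        for i, tok in enumerate(toks):
            if (tok.type == tokenize.NAME and tok.string in keywords
                    and i + 2 < len(toks)
                    and toks[i + 1].type == tokenize.OP and toks[i + 1].string == "("
                    and toks[i + 2].type == tokenize.STRING):
                found.append(toks[i + 2].string)
        return found

    print(find_marked_strings('greeting = _("hello world")\n'))
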
/external/python/cpython2/Tools/i18n/
pygettext.py
165 import tokenize
369 if ttype == tokenize.STRING:
372 elif ttype not in (tokenize.COMMENT, tokenize.NL):
376 if ttype == tokenize.NAME and tstring in ('class', 'def'):
379 if ttype == tokenize.NAME and tstring in opts.keywords:
384 if ttype == tokenize.OP and tstring == ':':
389 if ttype == tokenize.STRING:
392 elif ttype not in (tokenize.NEWLINE, tokenize.INDENT,
393 tokenize.COMMENT):
398 if ttype == tokenize.OP and tstring == '(':
[all …]
/external/llvm-project/clang-tools-extra/unittests/clang-include-fixer/
FuzzySymbolIndexTests.cpp
22 EXPECT_THAT(FuzzySymbolIndex::tokenize("URLHandlerCallback"), in TEST()
24 EXPECT_THAT(FuzzySymbolIndex::tokenize("snake_case11"), in TEST()
26 EXPECT_THAT(FuzzySymbolIndex::tokenize("__$42!!BOB\nbob"), in TEST()
37 auto Tokens = FuzzySymbolIndex::tokenize(Identifier);
45 return FuzzySymbolIndex::queryRegexp(FuzzySymbolIndex::tokenize(query)); in TEST()
/external/webrtc/rtc_base/
string_encode_unittest.cc
141 EXPECT_EQ(5ul, tokenize("one two three four five", ' ', &fields)); in TEST()
143 EXPECT_EQ(1ul, tokenize("one", ' ', &fields)); in TEST()
147 EXPECT_EQ(5ul, tokenize(" one two three four five ", ' ', &fields)); in TEST()
149 EXPECT_EQ(1ul, tokenize(" one ", ' ', &fields)); in TEST()
151 EXPECT_EQ(0ul, tokenize(" ", ' ', &fields)); in TEST()
158 tokenize("find middle one", ' ', &fields); in TEST()
164 tokenize(" find middle one ", ' ', &fields); in TEST()
168 tokenize(" ", ' ', &fields); in TEST()
188 ASSERT_EQ(0ul, tokenize("D \"A B", ' ', '(', ')', nullptr)); in TEST()
191 tokenize("A B C", ' ', '"', '"', &fields); in TEST()
[all …]
/external/deqp/framework/randomshaders/
rsgStatement.hpp
44 virtual void tokenize (GeneratorState& state, TokenStream& str) const = DE_NULL;
62 void tokenize (GeneratorState& state, TokenStream& str) const;
78 void tokenize (GeneratorState& state, TokenStream& str) const;
98 void tokenize (GeneratorState& state, TokenStream& str) const;
119 void tokenize (GeneratorState& state, TokenStream& str) const;
145 void tokenize (GeneratorState& state, TokenStream& str) const;
rsgExpression.hpp
57 virtual void tokenize (GeneratorState& state, TokenStream& str) const = DE_NULL;
74 …void tokenize (GeneratorState& state, TokenStream& str) const { DE_UNREF(state); str << Tok… in tokenize() function in rsg::VariableAccess
114 void tokenize (GeneratorState& state, TokenStream& str) const;
132 void tokenize (GeneratorState& state, TokenStream& str) const;
151 void tokenize (GeneratorState& state, TokenStream& str) const;
169 void tokenize (GeneratorState& state, TokenStream& str) const;
191 void tokenize (GeneratorState& state, TokenStream& str) const;
216 void tokenize (GeneratorState& state, TokenStream& str) const;
236 void tokenize (GeneratorState& state, TokenStream& str) const;
258 void tokenize (GeneratorState& state, TokenStream& str) const;
rsgStatement.cpp
203 void BlockStatement::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::BlockStatement
208 (*i)->tokenize(state, str); in tokenize()
219 void ExpressionStatement::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::ExpressionStatement
222 m_expression->tokenize(state, str); in tokenize()
333 void DeclarationStatement::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::DeclarationStatement
340 m_expression->tokenize(state, str); in tokenize()
456 void ConditionalStatement::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::ConditionalStatement
462 m_condition->tokenize(state, str); in tokenize()
469 m_trueStatement->tokenize(state, str); in tokenize()
473 m_trueStatement->tokenize(state, str); in tokenize()
[all …]
rsgShader.cpp
93 void Shader::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::Shader
101 m_globalStatements[ndx]->tokenize(state, str); in tokenize()
107 m_functions[ndx]->tokenize(state, str); in tokenize()
112 m_mainFunction.tokenize(state, str); in tokenize()
125 void Function::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::Function
147 m_functionBlock.tokenize(state, str); in tokenize()
/external/python/cpython3/Lib/
tabnanny.py
25 import tokenize
26 if not hasattr(tokenize, 'NL'):
98 f = tokenize.open(file)
107 process_tokens(tokenize.generate_tokens(f.readline))
109 except tokenize.TokenError as msg:
278 INDENT = tokenize.INDENT
279 DEDENT = tokenize.DEDENT
280 NEWLINE = tokenize.NEWLINE
281 JUNK = tokenize.COMMENT, tokenize.NL
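A sketch of tokenize.open() as tabnanny uses it above: it opens a source file in text mode with the encoding detected from the file's coding cookie or BOM. The temporary file below is only a placeholder for the demo.

    import pathlib
    import tokenize

    demo = pathlib.Path("tabnanny_demo.py")          # throwaway demo file
    demo.write_text("if True:\n    pass\n")
    with tokenize.open(demo) as f:                   # text mode, detected encoding
        for tok in tokenize.generate_tokens(f.readline):
            if tok.type in (tokenize.INDENT, tokenize.DEDENT):
                print(tokenize.tok_name[tok.type], tok.start)
    demo.unlink()
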
/external/python/cpython2/Doc/library/
tokenize.rst
1 :mod:`tokenize` --- Tokenizer for Python source
4 .. module:: tokenize
9 **Source code:** :source:`Lib/tokenize.py`
13 The :mod:`tokenize` module provides a lexical scanner for Python source code,
22 :func:`tokenize.generate_tokens` for the character sequence that identifies a
48 .. function:: tokenize(readline[, tokeneater])
50 The :func:`.tokenize` function accepts two parameters: one representing the input
51 stream, and one providing an output mechanism for :func:`.tokenize`.
67 :mod:`tokenize`, as are two additional token type values that might be passed to
68 the *tokeneater* function by :func:`.tokenize`:
[all …]
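For the Python 2 API documented above, the second argument is a callback. A minimal Python 2-only sketch (it does not run on Python 3, where tokenize.tokenize() is a generator instead):

    # Python 2 only
    import StringIO
    import tokenize

    def tokeneater(type, string, start, end, line):
        print tokenize.tok_name[type], repr(string)

    tokenize.tokenize(StringIO.StringIO("x = 1\n").readline, tokeneater)
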
/external/python/cpython2/Lib/
tabnanny.py
26 import tokenize
27 if not hasattr(tokenize, 'NL'):
106 process_tokens(tokenize.generate_tokens(f.readline))
108 except tokenize.TokenError, msg:
274 INDENT = tokenize.INDENT
275 DEDENT = tokenize.DEDENT
276 NEWLINE = tokenize.NEWLINE
277 JUNK = tokenize.COMMENT, tokenize.NL
/external/tensorflow/tensorflow/python/autograph/pyct/
parser.py
29 import tokenize
70 token_gen = tokenize.generate_tokens(six.StringIO(code_string).readline)
77 except tokenize.TokenError:
85 if tok_type == tokenize.INDENT:
90 tokenize.NL, tokenize.NEWLINE, tokenize.STRING, tokenize.COMMENT):
101 if tok_type == tokenize.INDENT:
113 new_code = tokenize.untokenize(tokens)
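A sketch of the token round trip behind these hits (not the autograph parser itself): read tokens with generate_tokens(), optionally adjust them, and rebuild source with untokenize().

    import io
    import tokenize

    code = "def f():\n    return 42\n"
    tokens = list(tokenize.generate_tokens(io.StringIO(code).readline))
    rebuilt = tokenize.untokenize(tokens)
    print(rebuilt)   # for a full token sequence this round-trips the source
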
/external/autotest/utils/
reindent.py
44 import tokenize
176 tokenize.tokenize(self.getline, self.tokeneater)
258 INDENT=tokenize.INDENT,
259 DEDENT=tokenize.DEDENT,
260 NEWLINE=tokenize.NEWLINE,
261 COMMENT=tokenize.COMMENT,
262 NL=tokenize.NL):
/external/python/cpython3/Tools/scripts/
highlight.py
11 import tokenize
35 tok_type = tokenize.COMMENT
37 for tok in tokenize.generate_tokens(readline):
41 if tok_type == tokenize.COMMENT:
43 elif tok_type == tokenize.OP and tok_str[:1] not in '{}[](),.:;@':
45 elif tok_type == tokenize.STRING:
47 if prev_tok_type == tokenize.INDENT or scol==0:
49 elif tok_type == tokenize.NAME:
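A toy classifier in the spirit of the highlight.py hits above (hypothetical, not the real script): bucket each token by type the way a highlighter would.

    import io
    import tokenize

    def classify(code):
        # Yield (kind, text) pairs for each token in the source string.
        for tok in tokenize.generate_tokens(io.StringIO(code).readline):
            if tok.type == tokenize.COMMENT:
                kind = "comment"
            elif tok.type == tokenize.STRING:
                kind = "string"
            elif tok.type == tokenize.NAME:
                kind = "name"
            elif tok.type == tokenize.OP:
                kind = "operator"
            else:
                kind = tokenize.tok_name[tok.type].lower()
            yield kind, tok.string

    for kind, text in classify("x = 'hi'  # note\n"):
        print(kind, repr(text))
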
cleanfuture.py
42 import tokenize
156 STRING = tokenize.STRING
157 NL = tokenize.NL
158 NEWLINE = tokenize.NEWLINE
159 COMMENT = tokenize.COMMENT
160 NAME = tokenize.NAME
161 OP = tokenize.OP
164 get = tokenize.generate_tokens(self.getline).__next__
reindent.py
46 import tokenize
122 encoding, _ = tokenize.detect_encoding(f.readline)
202 tokens = tokenize.generate_tokens(self.getline)
286 INDENT=tokenize.INDENT,
287 DEDENT=tokenize.DEDENT,
288 NEWLINE=tokenize.NEWLINE,
289 COMMENT=tokenize.COMMENT,
290 NL=tokenize.NL):
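A sketch of the encoding detection used in this reindent.py (stdlib only): detect_encoding() reads just enough of a bytes readline to find the coding cookie or BOM.

    import io
    import tokenize

    data = b"# -*- coding: latin-1 -*-\nx = 1\n"
    encoding, first_lines = tokenize.detect_encoding(io.BytesIO(data).readline)
    print(encoding)      # normalized name, e.g. 'iso-8859-1'
    print(first_lines)   # the raw byte lines already consumed
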
/external/python/pyfakefs/pyfakefs/
pytest_plugin.py
13 import tokenize
29 Patcher.SKIPMODULES.add(tokenize)
41 tokenize._builtin_open = patcher.original_open
