/external/googletest/googlemock/scripts/generator/cpp/
  ast.py:
    46: from cpp import tokenize
    551: if parts[-1].token_type == tokenize.NAME:
    581: if (type_name and type_name[-1].token_type == tokenize.NAME and
    582: p.token_type == tokenize.NAME):
    583: type_name.append(tokenize.Token(tokenize.SYNTAX, ' ', 0, 0))
    739: if token.token_type == tokenize.NAME:
    750: if next.token_type == tokenize.SYNTAX and next.name == '(':
    755: syntax = tokenize.SYNTAX
    764: new_temp = self._GetTokensUpTo(tokenize.SYNTAX, ';')
    767: last_token = tokenize.Token(tokenize.SYNTAX, ';', 0, 0)
    [all …]

/external/v8/testing/gmock/scripts/generator/cpp/
  ast.py:
    46: from cpp import tokenize
    551: if parts[-1].token_type == tokenize.NAME:
    581: if (type_name and type_name[-1].token_type == tokenize.NAME and
    582: p.token_type == tokenize.NAME):
    583: type_name.append(tokenize.Token(tokenize.SYNTAX, ' ', 0, 0))
    739: if token.token_type == tokenize.NAME:
    750: if next.token_type == tokenize.SYNTAX and next.name == '(':
    755: syntax = tokenize.SYNTAX
    764: new_temp = self._GetTokensUpTo(tokenize.SYNTAX, ';')
    767: last_token = tokenize.Token(tokenize.SYNTAX, ';', 0, 0)
    [all …]

/external/google-breakpad/src/testing/scripts/generator/cpp/
  ast.py:
    46: from cpp import tokenize
    549: if parts[-1].token_type == tokenize.NAME:
    579: if (type_name and type_name[-1].token_type == tokenize.NAME and
    580: p.token_type == tokenize.NAME):
    581: type_name.append(tokenize.Token(tokenize.SYNTAX, ' ', 0, 0))
    738: if token.token_type == tokenize.NAME:
    749: if next.token_type == tokenize.SYNTAX and next.name == '(':
    754: syntax = tokenize.SYNTAX
    763: new_temp = self._GetTokensUpTo(tokenize.SYNTAX, ';')
    766: last_token = tokenize.Token(tokenize.SYNTAX, ';', 0, 0)
    [all …]

/external/python/cpython2/Tools/scripts/
  checkappend.py:
    39: import tokenize
    106: tokenize.tokenize(self.file.readline, self.tokeneater)
    107: except tokenize.TokenError, msg:
    113: NEWLINE=tokenize.NEWLINE,
    114: JUNK=(tokenize.COMMENT, tokenize.NL),
    115: OP=tokenize.OP,
    116: NAME=tokenize.NAME):
  cleanfuture.py:
    42: import tokenize
    157: STRING = tokenize.STRING
    158: NL = tokenize.NL
    159: NEWLINE = tokenize.NEWLINE
    160: COMMENT = tokenize.COMMENT
    161: NAME = tokenize.NAME
    162: OP = tokenize.OP
    165: get = tokenize.generate_tokens(self.getline).next
  reindent.py:
    44: import tokenize
    188: tokenize.tokenize(self.getline, self.tokeneater)
    270: INDENT=tokenize.INDENT,
    271: DEDENT=tokenize.DEDENT,
    272: NEWLINE=tokenize.NEWLINE,
    273: COMMENT=tokenize.COMMENT,
    274: NL=tokenize.NL):
  finddiv.py:
    21: import tokenize
    58: g = tokenize.generate_tokens(fp.readline)

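checkappend.py and reindent.py drive the scanner through the legacy Python 2 callback interface: tokenize.tokenize(readline, tokeneater) pulls source lines via readline and calls tokeneater once per token. A minimal sketch of that pattern, assuming a Python 2 interpreter and a hypothetical input file:

    import token
    import tokenize

    def tokeneater(ttype, tstring, start, end, line):
        # start and end are (row, col) positions in the scanned source;
        # line is the raw logical line the token came from.
        print token.tok_name[ttype], repr(tstring)

    with open('example.py') as f:  # hypothetical input file
        tokenize.tokenize(f.readline, tokeneater)
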
/external/python/cpython2/Tools/i18n/
  pygettext.py:
    165: import tokenize
    375: if ttype == tokenize.STRING:
    378: elif ttype not in (tokenize.COMMENT, tokenize.NL):
    382: if ttype == tokenize.NAME and tstring in ('class', 'def'):
    385: if ttype == tokenize.NAME and tstring in opts.keywords:
    390: if ttype == tokenize.OP and tstring == ':':
    395: if ttype == tokenize.STRING:
    398: elif ttype not in (tokenize.NEWLINE, tokenize.INDENT,
    399: tokenize.COMMENT):
    404: if ttype == tokenize.OP and tstring == '(':
    [all …]

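pygettext.py walks this token stream with a small state machine: a NAME token matching a keyword arms the scan, an OP '(' keeps it armed, and a following STRING is recorded. A simplified, hypothetical re-creation of that scan (Python 2; find_marked_strings is not part of pygettext):

    import tokenize
    from StringIO import StringIO

    def find_marked_strings(source, keywords=('_',)):
        hits = []
        want_paren = want_string = False
        for ttype, tstring, start, _, _ in tokenize.generate_tokens(
                StringIO(source).readline):
            if want_string and ttype == tokenize.STRING:
                hits.append((start[0], tstring))
            want_string = want_paren and ttype == tokenize.OP and tstring == '('
            want_paren = ttype == tokenize.NAME and tstring in keywords
        return hits

    print find_marked_strings('greeting = _("hello")\n')  # [(1, '"hello"')]
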
/external/deqp/framework/randomshaders/
  rsgExpression.hpp:
    57: virtual void tokenize (GeneratorState& state, TokenStream& str) const = DE_NULL;
    74: …void tokenize (GeneratorState& state, TokenStream& str) const { DE_UNREF(state); str << Tok… (in rsg::VariableAccess)
    113: void tokenize (GeneratorState& state, TokenStream& str) const;
    131: void tokenize (GeneratorState& state, TokenStream& str) const;
    149: void tokenize (GeneratorState& state, TokenStream& str) const;
    167: void tokenize (GeneratorState& state, TokenStream& str) const;
    189: void tokenize (GeneratorState& state, TokenStream& str) const;
    214: void tokenize (GeneratorState& state, TokenStream& str) const;
    233: void tokenize (GeneratorState& state, TokenStream& str) const;
    255: void tokenize (GeneratorState& state, TokenStream& str) const;
  rsgStatement.hpp:
    44: virtual void tokenize (GeneratorState& state, TokenStream& str) const = DE_NULL;
    62: void tokenize (GeneratorState& state, TokenStream& str) const;
    78: void tokenize (GeneratorState& state, TokenStream& str) const;
    98: void tokenize (GeneratorState& state, TokenStream& str) const;
    119: void tokenize (GeneratorState& state, TokenStream& str) const;
    145: void tokenize (GeneratorState& state, TokenStream& str) const;
  rsgStatement.cpp:
    203: void BlockStatement::tokenize (GeneratorState& state, TokenStream& str) const
    208: (*i)->tokenize(state, str);
    219: void ExpressionStatement::tokenize (GeneratorState& state, TokenStream& str) const
    222: m_expression->tokenize(state, str);
    333: void DeclarationStatement::tokenize (GeneratorState& state, TokenStream& str) const
    340: m_expression->tokenize(state, str);
    456: void ConditionalStatement::tokenize (GeneratorState& state, TokenStream& str) const
    462: m_condition->tokenize(state, str);
    469: m_trueStatement->tokenize(state, str);
    473: m_trueStatement->tokenize(state, str);
    [all …]
  rsgShader.cpp:
    93: void Shader::tokenize (GeneratorState& state, TokenStream& str) const
    101: m_globalStatements[ndx]->tokenize(state, str);
    107: m_functions[ndx]->tokenize(state, str);
    112: m_mainFunction.tokenize(state, str);
    125: void Function::tokenize (GeneratorState& state, TokenStream& str) const
    147: m_functionBlock.tokenize(state, str);
  rsgShader.hpp:
    54: void tokenize (GeneratorState& state, TokenStream& stream) const;
    106: void tokenize (GeneratorState& state, TokenStream& str) const;

/external/webrtc/webrtc/base/
  stringencode_unittest.cc:
    220: EXPECT_EQ(5ul, tokenize("one two three four five", ' ', &fields));
    222: EXPECT_EQ(1ul, tokenize("one", ' ', &fields));
    226: EXPECT_EQ(5ul, tokenize(" one two three four five ", ' ', &fields));
    228: EXPECT_EQ(1ul, tokenize(" one ", ' ', &fields));
    230: EXPECT_EQ(0ul, tokenize(" ", ' ', &fields));
    237: tokenize("find middle one", ' ', &fields);
    243: tokenize(" find middle one ", ' ', &fields);
    247: tokenize(" ", ' ', &fields);
    267: ASSERT_EQ(0ul, tokenize("D \"A B", ' ', '(', ')', NULL));
    270: tokenize("A B C", ' ', '"', '"', &fields);
    [all …]

/external/python/cpython2/Doc/library/
  tokenize.rst:
    1: :mod:`tokenize` --- Tokenizer for Python source
    4: .. module:: tokenize
    9: **Source code:** :source:`Lib/tokenize.py`
    13: The :mod:`tokenize` module provides a lexical scanner for Python source code,
    22: :func:`tokenize.generate_tokens` for the character sequence that identifies a
    48: .. function:: tokenize(readline[, tokeneater])
    50: The :func:`.tokenize` function accepts two parameters: one representing the input
    51: stream, and one providing an output mechanism for :func:`.tokenize`.
    67: :mod:`tokenize`, as are two additional token type values that might be passed to
    68: the *tokeneater* function by :func:`.tokenize`:
    [all …]
  tabnanny.rst:
    57: :func:`tokenize.tokenize`.
    65: Module :mod:`tokenize`

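Alongside the callback form documented above, the module's generator interface, tokenize.generate_tokens(), yields one 5-tuple per token instead of invoking a callback. A small self-contained sketch (Python 2; under Python 3, StringIO comes from io and the loop is unchanged):

    import token
    import tokenize
    from StringIO import StringIO

    source = "x = 1  # comment\n"
    for ttype, tstring, start, end, line in tokenize.generate_tokens(
            StringIO(source).readline):
        print token.tok_name[ttype], repr(tstring)
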
/external/python/cpython2/Lib/
  tabnanny.py:
    26: import tokenize
    27: if not hasattr(tokenize, 'NL'):
    106: process_tokens(tokenize.generate_tokens(f.readline))
    108: except tokenize.TokenError, msg:
    274: INDENT = tokenize.INDENT
    275: DEDENT = tokenize.DEDENT
    276: NEWLINE = tokenize.NEWLINE
    277: JUNK = tokenize.COMMENT, tokenize.NL

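tabnanny is a consumer of the same stream: process_tokens() takes the iterator from tokenize.generate_tokens() and raises when indentation is ambiguous. A sketch of the calling pattern the listings above use (Python 2; check_indentation is a hypothetical wrapper):

    import tabnanny
    import tokenize

    def check_indentation(path):
        # process_tokens() raises tabnanny.NannyNag on ambiguous mixing of
        # tabs and spaces, and tokenize.TokenError on an unterminated token.
        f = open(path)
        try:
            tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
        finally:
            f.close()
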
/external/autotest/utils/
  reindent.py:
    44: import tokenize
    176: tokenize.tokenize(self.getline, self.tokeneater)
    258: INDENT=tokenize.INDENT,
    259: DEDENT=tokenize.DEDENT,
    260: NEWLINE=tokenize.NEWLINE,
    261: COMMENT=tokenize.COMMENT,
    262: NL=tokenize.NL):

/external/swiftshader/third_party/LLVM/lib/MC/MCDisassembler/
  EDInst.cpp:
    165: int EDInst::tokenize() {
    175: return TokenizeResult.setResult(EDToken::tokenize(Tokens,
    183: if (tokenize()) (in numTokens())
    189: if (tokenize()) (in getToken())
    197: if (tokenize()) (in visitTokens())

/external/chromium-trace/catapult/common/py_utils/py_utils/refactor/
  offset_token.py:
    8: import tokenize
    66: tokenize_tokens = tokenize.generate_tokens(f.readline)
    90: while offset_tokens[0].type == tokenize.NL:
    115: return tokenize.untokenize(tokenize_tokens).replace('\\\n', ' \\\n')
  snippet.py:
    9: import tokenize
    228: tokens[0].type == tokenize.COMMENT or tokens[0].type == tokenize.NL):

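offset_token.py round-trips source through the scanner: generate_tokens() produces position-tagged 5-tuples and tokenize.untokenize() converts them back to text. A minimal sketch of that round trip (Python 2; with full 5-tuples the output tokenizes back to the same stream, though whitespace is not guaranteed byte-identical):

    import tokenize
    from StringIO import StringIO

    source = "x = 1\ny = 2\n"
    tokens = list(tokenize.generate_tokens(StringIO(source).readline))
    print tokenize.untokenize(tokens)
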
/external/python/cpython2/Lib/lib2to3/pgen2/
  driver.py:
    26: from . import grammar, parse, token, tokenize, pgen
    59: if type in (tokenize.COMMENT, tokenize.NL):
    88: tokens = tokenize.generate_tokens(stream.readline)
    105: tokens = tokenize.generate_tokens(StringIO.StringIO(text).readline)

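Before parsing, pgen2's driver skips the two trivia token types seen at its line 59 above; the same filter is easy to reproduce on a raw token stream. A sketch (Python 2; significant_tokens is a hypothetical helper, not part of lib2to3):

    import tokenize
    from StringIO import StringIO

    def significant_tokens(source):
        # COMMENT and NL (a non-logical newline) carry no syntactic weight,
        # which is why driver.py drops them before feeding the parser.
        for tok in tokenize.generate_tokens(StringIO(source).readline):
            if tok[0] not in (tokenize.COMMENT, tokenize.NL):
                yield tok
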
/external/swiftshader/third_party/subzero/src/
  IceRangeSpec.cpp:
    51: auto Tokens = RangeSpec::tokenize(Token, RangeSpec::DELIM_RANGE); (in getRange())
    97: std::vector<std::string> RangeSpec::tokenize(const std::string &Spec,
    115: auto Tokens = tokenize(Spec, DELIM_LIST); (in init())

/external/python/cpython2/Lib/idlelib/
  ScriptBinding.py:
    24: import tokenize
    72: tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
    73: except tokenize.TokenError as msg:

/external/testng/src/main/java/org/testng/remote/strprotocol/
  MessageHelper.java:
    200: return tokenize(messagePart, PARAM_DELIMITER); (in parseParameters())
    204: return tokenize(message, DELIMITER); (in parseMessage())
    207: private static String[] tokenize(final String message, final char separator) {