Searched refs:tokenize (Results 1 – 25 of 87) sorted by relevance

/external/google-breakpad/src/testing/scripts/generator/cpp/
ast.py
46 from cpp import tokenize
549 if parts[-1].token_type == tokenize.NAME:
579 if (type_name and type_name[-1].token_type == tokenize.NAME and
580 p.token_type == tokenize.NAME):
581 type_name.append(tokenize.Token(tokenize.SYNTAX, ' ', 0, 0))
738 if token.token_type == tokenize.NAME:
749 if next.token_type == tokenize.SYNTAX and next.name == '(':
754 syntax = tokenize.SYNTAX
763 new_temp = self._GetTokensUpTo(tokenize.SYNTAX, ';')
766 last_token = tokenize.Token(tokenize.SYNTAX, ';', 0, 0)
[all …]
/external/deqp/framework/randomshaders/
rsgExpression.hpp
57 virtual void tokenize (GeneratorState& state, TokenStream& str) const = DE_NULL;
74 …void tokenize (GeneratorState& state, TokenStream& str) const { DE_UNREF(state); str << Tok… in tokenize() function in rsg::VariableAccess
113 void tokenize (GeneratorState& state, TokenStream& str) const;
131 void tokenize (GeneratorState& state, TokenStream& str) const;
149 void tokenize (GeneratorState& state, TokenStream& str) const;
167 void tokenize (GeneratorState& state, TokenStream& str) const;
189 void tokenize (GeneratorState& state, TokenStream& str) const;
214 void tokenize (GeneratorState& state, TokenStream& str) const;
233 void tokenize (GeneratorState& state, TokenStream& str) const;
255 void tokenize (GeneratorState& state, TokenStream& str) const;
rsgStatement.hpp
44 virtual void tokenize (GeneratorState& state, TokenStream& str) const = DE_NULL;
62 void tokenize (GeneratorState& state, TokenStream& str) const;
78 void tokenize (GeneratorState& state, TokenStream& str) const;
98 void tokenize (GeneratorState& state, TokenStream& str) const;
119 void tokenize (GeneratorState& state, TokenStream& str) const;
145 void tokenize (GeneratorState& state, TokenStream& str) const;
rsgStatement.cpp
203 void BlockStatement::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::BlockStatement
208 (*i)->tokenize(state, str); in tokenize()
219 void ExpressionStatement::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::ExpressionStatement
222 m_expression->tokenize(state, str); in tokenize()
333 void DeclarationStatement::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::DeclarationStatement
340 m_expression->tokenize(state, str); in tokenize()
456 void ConditionalStatement::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::ConditionalStatement
462 m_condition->tokenize(state, str); in tokenize()
469 m_trueStatement->tokenize(state, str); in tokenize()
473 m_trueStatement->tokenize(state, str); in tokenize()
[all …]
rsgShader.cpp
93 void Shader::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::Shader
101 m_globalStatements[ndx]->tokenize(state, str); in tokenize()
107 m_functions[ndx]->tokenize(state, str); in tokenize()
112 m_mainFunction.tokenize(state, str); in tokenize()
125 void Function::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::Function
147 m_functionBlock.tokenize(state, str); in tokenize()
rsgShader.hpp
54 void tokenize (GeneratorState& state, TokenStream& stream) const;
106 void tokenize (GeneratorState& state, TokenStream& str) const;
rsgExpression.cpp
411 void FloatLiteral::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::FloatLiteral
465 void IntLiteral::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::IntLiteral
502 void BoolLiteral::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::BoolLiteral
775 void ConstructorOp::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::ConstructorOp
787 (*i)->tokenize(state, str); in tokenize()
929 void AssignOp::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::AssignOp
931 m_lvalueExpr->tokenize(state, str); in tokenize()
933 m_rvalueExpr->tokenize(state, str); in tokenize()
1246 void ParenOp::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::ParenOp
1249 m_child->tokenize(state, str); in tokenize()
[all …]
rsgBuiltinFunctions.hpp
43 void tokenize (GeneratorState& state, TokenStream& str) const;
95 void UnaryBuiltinVecFunc<GetValueRangeWeight, ComputeValueRange, Evaluate>::tokenize (GeneratorStat… in tokenize() function in rsg::UnaryBuiltinVecFunc
98 m_child->tokenize(state, str); in tokenize()
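
The rsg listings above sketch a small visitor-style interface: rsgExpression.hpp and rsgStatement.hpp declare a pure virtual tokenize(GeneratorState&, TokenStream&) const, and the .cpp hits show each node emitting its own syntax and recursing into its children (AssignOp, ConstructorOp, ParenOp, and so on). Below is a minimal sketch of what such an override tends to look like; the Expression base class usage and the Token::LEFT_PAREN / Token::RIGHT_PAREN constants are assumptions inferred from the listing, not verified against the deqp sources.

    #include "rsgExpression.hpp"   // deqp header, per the listing path above

    // Hypothetical node type, shaped after ParenOp::tokenize() in rsgExpression.cpp.
    class GroupingOp : public rsg::Expression
    {
    public:
        void tokenize (rsg::GeneratorState& state, rsg::TokenStream& str) const
        {
            str << rsg::Token::LEFT_PAREN;   // emit this node's own syntax (assumed constant)
            m_child->tokenize(state, str);   // then let the child contribute its tokens
            str << rsg::Token::RIGHT_PAREN;  // assumed constant
        }

    private:
        rsg::Expression* m_child;
    };
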
/external/webrtc/webrtc/base/
stringencode_unittest.cc
220 EXPECT_EQ(5ul, tokenize("one two three four five", ' ', &fields)); in TEST()
222 EXPECT_EQ(1ul, tokenize("one", ' ', &fields)); in TEST()
226 EXPECT_EQ(5ul, tokenize(" one two three four five ", ' ', &fields)); in TEST()
228 EXPECT_EQ(1ul, tokenize(" one ", ' ', &fields)); in TEST()
230 EXPECT_EQ(0ul, tokenize(" ", ' ', &fields)); in TEST()
237 tokenize("find middle one", ' ', &fields); in TEST()
243 tokenize(" find middle one ", ' ', &fields); in TEST()
247 tokenize(" ", ' ', &fields); in TEST()
267 ASSERT_EQ(0ul, tokenize("D \"A B", ' ', '(', ')', NULL)); in TEST()
270 tokenize("A B C", ' ', '"', '"', &fields); in TEST()
[all …]
stringencode.h
146 size_t tokenize(const std::string& source, char delimiter,
165 size_t tokenize(const std::string& source, char delimiter, char start_mark,
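
stringencode.h declares tokenize() returning the number of fields found, plus an overload taking start/end mark characters, and stringencode_unittest.cc pins down the behaviour quoted above (empty fields are dropped, so stray delimiters add nothing). A rough usage sketch follows; it assumes the function lives in the rtc namespace and that the truncated third parameter is a std::vector<std::string>*, neither of which is visible in the listing itself.

    #include <string>
    #include <vector>
    #include "webrtc/base/stringencode.h"   // path per the listing above

    int main() {
      std::vector<std::string> fields;

      // Per stringencode_unittest.cc line 226, leading/trailing/repeated
      // delimiters are collapsed, so this still yields five fields.
      size_t n = rtc::tokenize(" one two three four five ", ' ', &fields);

      // The start_mark/end_mark overload (stringencode.h line 165) presumably
      // keeps a marked region such as a quoted substring together as one field.
      fields.clear();
      rtc::tokenize("find \"middle one\"", ' ', '"', '"', &fields);

      return n == 5 ? 0 : 1;
    }
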
/external/autotest/utils/
reindent.py
44 import tokenize
176 tokenize.tokenize(self.getline, self.tokeneater)
258 INDENT=tokenize.INDENT,
259 DEDENT=tokenize.DEDENT,
260 NEWLINE=tokenize.NEWLINE,
261 COMMENT=tokenize.COMMENT,
262 NL=tokenize.NL):
/external/chromium-trace/catapult/third_party/coverage/coverage/
phystokens.py
11 import tokenize
50 if last_ttype == tokenize.COMMENT:
91 ws_tokens = set([token.INDENT, token.DEDENT, token.NEWLINE, tokenize.NL])
115 tok_class = tokenize.tok_name.get(ttype, 'xx').lower()[:3]
149 self.last_tokens = list(tokenize.generate_tokens(readline))
263 return tokenize.detect_encoding(readline)[0]
parser.py
10 import tokenize
114 tokenize.tok_name.get(toktype, toktype),
148 if ttext.strip() and toktype != tokenize.COMMENT:
208 except (tokenize.TokenError, IndentationError) as err:
/external/chromium-trace/catapult/common/py_utils/py_utils/refactor/
offset_token.py
8 import tokenize
66 tokenize_tokens = tokenize.generate_tokens(f.readline)
90 while offset_tokens[0].type == tokenize.NL:
115 return tokenize.untokenize(tokenize_tokens).replace('\\\n', ' \\\n')
snippet.py
9 import tokenize
228 tokens[0].type == tokenize.COMMENT or tokens[0].type == tokenize.NL):
/external/minijail/
util.c
99 while ((group = tokenize(&constant_str, "|")) != NULL) { in parse_constant()
141 char *tokenize(char **stringp, const char *delim) in tokenize() function
util.h
56 char *tokenize(char **stringp, const char *delim);
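
minijail's util.h exposes a strsep-style helper, and util.c line 99 shows parse_constant() draining it token by token. The sketch below mirrors that consumption loop; it assumes tokenize() advances the cursor in place and returns NULL once the input is exhausted (implied by the loop above but not stated), and the flag string is an arbitrary example.

    #include <cstdio>
    #include <cstdlib>
    #include <cstring>

    extern "C" char *tokenize(char **stringp, const char *delim);  // from minijail util.h

    int main() {
      // Work on a writable copy, since tokenize() presumably modifies the buffer.
      char *buf = strdup("SECCOMP|NO_NEW_PRIVS|LOG");
      char *cursor = buf;
      char *tok;

      // Same shape as the parse_constant() loop at util.c line 99.
      while ((tok = tokenize(&cursor, "|")) != NULL)
        printf("token: %s\n", tok);

      free(buf);
      return 0;
    }
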
/external/testng/src/main/java/org/testng/remote/strprotocol/
MessageHelper.java
200 return tokenize(messagePart, PARAM_DELIMITER); in parseParameters()
204 return tokenize(message, DELIMITER); in parseMessage()
207 private static String[] tokenize(final String message, final char separator) { in tokenize() method in MessageHelper
/external/sqlite/android/
sqlite3_android.cpp
258 static void tokenize(sqlite3_context * context, int argc, sqlite3_value ** argv) in tokenize() function
449 …err = sqlite3_create_function(handle, "_TOKENIZE", 4, SQLITE_UTF16, collator, tokenize, NULL, NULL… in register_localized_collators()
453 …err = sqlite3_create_function(handle, "_TOKENIZE", 5, SQLITE_UTF16, collator, tokenize, NULL, NULL… in register_localized_collators()
457 …err = sqlite3_create_function(handle, "_TOKENIZE", 6, SQLITE_UTF16, collator, tokenize, NULL, NULL… in register_localized_collators()
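
Here tokenize() is an SQLite application-defined function: register_localized_collators() registers it three times under the name _TOKENIZE, at arities 4, 5 and 6, with UTF-16 text encoding and the collator passed through as user data. A generic sketch of that registration pattern is below; the callback body is a do-nothing placeholder, not the actual tokenizer logic from sqlite3_android.cpp.

    #include <sqlite3.h>
    #include <stddef.h>

    // Placeholder scalar function with the same signature shape as tokenize() above.
    static void my_tokenize(sqlite3_context *context, int argc, sqlite3_value **argv) {
      (void)argc;
      (void)argv;
      sqlite3_result_int(context, 0);
    }

    int register_tokenize(sqlite3 *handle, void *collator) {
      // Mirrors sqlite3_android.cpp line 449: UTF-16 text representation,
      // collator threaded through as user data, one registration per arity.
      return sqlite3_create_function(handle, "_TOKENIZE", 4, SQLITE_UTF16,
                                     collator, my_tokenize, NULL, NULL);
    }
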
/external/apache-xml/src/main/java/org/apache/xpath/compiler/
Lexer.java
96 void tokenize(String pat) throws javax.xml.transform.TransformerException in tokenize() method in Lexer
98 tokenize(pat, null); in tokenize()
109 void tokenize(String pat, Vector targetStrings) in tokenize() method in Lexer
/external/selinux/libsepol/include/sepol/policydb/
util.h
41 extern int tokenize(char *line_buf, char delim, int num_args, ...);
/external/chromium-trace/catapult/third_party/vinn/third_party/parse5/test/fixtures/
tokenizer_test.js
6 function tokenize(html, initialState, lastStartTag) { function
173 var out = tokenize(test.input, test.initialState, test.lastStartTag);
/external/e2fsprogs/e2fsck/
dict.c
1227 static int tokenize(char *string, ...) in tokenize() function
1322 if (tokenize(in+1, &tok1, &tok2, (char **) 0) != 2) { in construct()
1393 if (tokenize(in+1, &tok1, &tok2, (char **) 0) != 2) { in main()
1414 if (tokenize(in+1, &tok1, (char **) 0) != 1) { in main()
1436 if (tokenize(in+1, &tok1, (char **) 0) != 1) { in main()
1486 if (tokenize(in+1, &tok1, (char **) 0) != 1) { in main()
1499 if (tokenize(in+1, &tok1, &tok2, (char **) 0) != 2) { in main()
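
The dict.c hits show a variadic calling convention: each call passes a set of char ** output slots terminated by (char **) 0 and checks that the return value equals the number of tokens it expects. The stand-in below reproduces only that convention so the call shape is clear; it is not e2fsprogs' implementation, and the real splitting rules (delimiters, quoting) are not visible in the listing.

    #include <cstdarg>
    #include <cstdio>
    #include <cstring>

    // Illustrative stand-in: fill the supplied char** slots (terminated by a
    // (char **) 0 sentinel) with whitespace-separated tokens and return how
    // many were filled. dict.c's own rules may differ.
    static int tokenize(char *string, ...) {
      va_list ap;
      int count = 0;
      char **slot;
      va_start(ap, string);
      char *tok = strtok(string, " \t\n");
      while ((slot = va_arg(ap, char **)) != NULL && tok != NULL) {
        *slot = tok;
        count++;
        tok = strtok(NULL, " \t\n");
      }
      va_end(ap);
      return count;
    }

    int main() {
      char line[] = "alloc 42";
      char *tok1, *tok2;
      // Same shape as the dict.c call sites: demand exactly two tokens.
      if (tokenize(line, &tok1, &tok2, (char **) 0) != 2)
        return 1;
      printf("cmd=%s arg=%s\n", tok1, tok2);
      return 0;
    }
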
/external/libvpx/libvpx/vp8/
vp8cx.mk
56 VP8_CX_SRCS-yes += encoder/tokenize.h
68 VP8_CX_SRCS-yes += encoder/tokenize.c
/external/google-breakpad/src/processor/
processor.gyp
128 'tokenize.cc',
129 'tokenize.h',
