Home
last modified time | relevance | path

Searched refs:Tokenize (Results 1 – 18 of 18) sorted by relevance

/external/vixl/src/aarch64/
Ddebugger-aarch64.cc 63 static Token* Tokenize(const char* arg);
100 static Token* Tokenize(const char* arg);
122 static Token* Tokenize(const char* arg);
148 static Token* Tokenize(const char* arg);
166 static Token* Tokenize(const char* arg);
183 static Token* Tokenize(const char* arg);
216 static Token* Tokenize(const char* arg);
831 Token* Token::Tokenize(const char* arg) { in Tokenize() function in vixl::aarch64::Token
839 Token* token = RegisterToken::Tokenize(arg); in Tokenize()
844 token = FPRegisterToken::Tokenize(arg); in Tokenize()
[all …]
/external/libtextclassifier/tests/
Dfeature-processor_test.cc 224 std::vector<Token> tokens = feature_processor.Tokenize("one, two, three"); in TEST()
257 tokens = feature_processor3.Tokenize("zero, one, two, three, four"); in TEST()
365 1, feature_processor.Tokenize("aaa bbb ccc")), in TEST()
368 1, feature_processor.Tokenize("aaa bbb ěěě")), in TEST()
371 1, feature_processor.Tokenize("ěěě řřř ěěě")), in TEST()
538 std::vector<Token> tokens = feature_processor.Tokenize("พระบาทสมเด็จพระปรมิ"); in TEST()
557 feature_processor.Tokenize("พระบาท สมเด็จ พระ ปร มิ"); in TEST()
601 std::vector<Token> tokens = feature_processor.Tokenize( in TEST()
Dtokenizer_test.cc 105 std::vector<Token> tokens = tokenizer.Tokenize("Hello world!"); in TEST()
238 tokens = tokenizer.Tokenize( in TEST()
242 tokens = tokenizer.Tokenize("問少目 hello 木輸ยามきゃ"); in TEST()
/external/google-breakpad/src/processor/
Dbasic_source_line_resolver.cc 471 if (!Tokenize(file_line, kWhitespace, 2, &tokens)) { in ParseFile()
499 if (!Tokenize(function_line, kWhitespace, 4, &tokens)) { in ParseFunction()
531 if (!Tokenize(line_line, kWhitespace, 4, &tokens)) { in ParseLine()
580 if (!Tokenize(public_line, kWhitespace, 3, &tokens)) { in ParsePublicSymbol()
Dtokenize.h 53 bool Tokenize(char *line,
Dtokenize.cc 45 bool Tokenize(char *line, in Tokenize() function
Dwindows_frame_info.h 126 if (!Tokenize(&buffer[0], " \r\n", 11, &tokens)) in ParseFromString()
/external/libtextclassifier/smartselect/
Dtokenizer.h 39 std::vector<Token> Tokenize(const std::string& utf8_text) const;
Dfeature-processor.cc 183 std::vector<Token> FeatureProcessor::Tokenize( in Tokenize() function in libtextclassifier::FeatureProcessor
187 return tokenizer_.Tokenize(utf8_text); in Tokenize()
204 return tokenizer_.Tokenize(utf8_text); in Tokenize()
546 *tokens = Tokenize(context); in TokenizeAndFindClick()
760 std::vector<Token> tokens = tokenizer_.Tokenize(text); in TokenizeSubstring()
Dtokenizer.cc 67 std::vector<Token> Tokenizer::Tokenize(const std::string& utf8_text) const { in Tokenize() function in libtextclassifier::Tokenizer
Dfeature-processor.h 114 std::vector<Token> Tokenize(const std::string& utf8_text) const;
/external/chromium-trace/catapult/common/py_utils/py_utils/refactor/
Doffset_token.py 55 def Tokenize(f): function
Dsnippet.py 206 tokens = offset_token.Tokenize(f)
/external/vulkan-validation-layers/tests/gtest-1.7.0/scripts/
Dpump.py 382 def Tokenize(s): function
579 tokens = list(Tokenize(pump_src_text))
/external/v8/testing/gtest/scripts/
Dpump.py 382 def Tokenize(s): function
579 tokens = list(Tokenize(pump_src_text))
/external/googletest/googletest/scripts/
Dpump.py 382 def Tokenize(s): function
579 tokens = list(Tokenize(pump_src_text))
/external/google-breakpad/src/testing/gtest/scripts/
Dpump.py 382 def Tokenize(s): function
579 tokens = list(Tokenize(pump_src_text))
/external/protobuf/gtest/scripts/
Dpump.py 376 def Tokenize(s): function
571 for token in Tokenize(s):