Searched refs:Tokenize (Results 1 – 25 of 43) sorted by relevance

/external/libtextclassifier/native/annotator/duration/
duration_test.cc 112 std::vector<Token> Tokenize(const UnicodeText& text) { in Tokenize() function in libtextclassifier3::__anon8aa146150111::DurationAnnotatorTest
113 return feature_processor_->Tokenize(text); in Tokenize()
152 std::vector<Token> tokens = Tokenize(text); in TEST_F()
171 std::vector<Token> tokens = Tokenize(text); in TEST_F()
190 std::vector<Token> tokens = Tokenize(text); in TEST_F()
209 std::vector<Token> tokens = Tokenize(text); in TEST_F()
228 std::vector<Token> tokens = Tokenize(text); in TEST_F()
247 std::vector<Token> tokens = Tokenize(text); in TEST_F()
266 std::vector<Token> tokens = Tokenize(text); in TEST_F()
286 std::vector<Token> tokens = Tokenize(text); in TEST_F()
[all …]
/external/libtextclassifier/native/utils/
tokenizer_test.cc 94 std::vector<Token> Tokenize(const std::string& utf8_text) const { in Tokenize() function in libtextclassifier3::__anonf2ccfc700111::TestingTokenizerProxy
95 return tokenizer_->Tokenize(utf8_text); in Tokenize()
178 std::vector<Token> tokens = tokenizer.Tokenize("Hello world!"); in TEST()
213 EXPECT_THAT(tokenizer.Tokenize("앨라배마 주 전화(123) 456-789웹사이트"), in TEST()
351 tokens = tokenizer.Tokenize( in TEST()
355 tokens = tokenizer.Tokenize("問少目 hello 木輸ยามきゃ"); in TEST()
379 std::vector<Token> tokens = tokenizer.Tokenize("พระบาท สมเด็จ พระ ปร มิ"); in TEST()
400 tokenizer.Tokenize("The interval is: -(12, 138*)"); in TEST()
427 std::vector<Token> tokens = tokenizer.Tokenize("3.1 3﹒2 3.3"); in TEST()
445 std::vector<Token> tokens = tokenizer.Tokenize("พระบาทสมเด็จพระปรมิ"); in TEST()
[all …]
tokenizer.h 93 std::vector<Token> Tokenize(const std::string& text) const;
96 std::vector<Token> Tokenize(const UnicodeText& text_unicode) const;
tokenizer.cc 93 std::vector<Token> Tokenizer::Tokenize(const std::string& text) const { in Tokenize() function in libtextclassifier3::Tokenizer
95 return Tokenize(text_unicode); in Tokenize()
98 std::vector<Token> Tokenizer::Tokenize(const UnicodeText& text_unicode) const { in Tokenize() function in libtextclassifier3::Tokenizer
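The two hits in tokenizer.h show overloads for UTF-8 std::string and pre-decoded UnicodeText, and tokenizer.cc shows the string overload decoding and delegating to the UnicodeText one. A minimal usage sketch, assuming an already-constructed Tokenizer and the in-repo include path (neither the constructor nor the include path appears in these hits):

  #include <cstdio>
  #include <vector>
  #include "utils/tokenizer.h"  // assumed in-repo include path

  // `tokenizer` is assumed to be fully constructed elsewhere; only Tokenize()
  // itself is confirmed by the hits above.
  void DumpTokenCount(const libtextclassifier3::Tokenizer& tokenizer) {
    std::vector<libtextclassifier3::Token> tokens = tokenizer.Tokenize("Hello world!");
    std::printf("%zu tokens\n", tokens.size());
  }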
/external/perfetto/src/trace_processor/importers/proto/
proto_trace_parser_unittest.cc 235 util::Status Tokenize() { in Tokenize() function in perfetto::trace_processor::__anon1e8400a00111::ProtoTraceParserTest
305 Tokenize(); in TEST_F()
336 Tokenize(); in TEST_F()
386 Tokenize(); in TEST_F()
453 Tokenize(); in TEST_F()
498 Tokenize(); in TEST_F()
520 Tokenize(); in TEST_F()
539 Tokenize(); in TEST_F()
553 Tokenize(); in TEST_F()
570 Tokenize(); in TEST_F()
[all …]
/external/google-breakpad/src/processor/
basic_source_line_resolver.cc 471 if (!Tokenize(file_line, kWhitespace, 2, &tokens)) { in ParseFile()
499 if (!Tokenize(function_line, kWhitespace, 4, &tokens)) { in ParseFunction()
531 if (!Tokenize(line_line, kWhitespace, 4, &tokens)) { in ParseLine()
580 if (!Tokenize(public_line, kWhitespace, 3, &tokens)) { in ParsePublicSymbol()
tokenize.h 53 bool Tokenize(char *line,
tokenize.cc 45 bool Tokenize(char *line, in Tokenize() function
windows_frame_info.h 126 if (!Tokenize(&buffer[0], " \r\n", 11, &tokens)) in ParseFromString()
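The tokenize.h hit is truncated after the first parameter; the call sites above (ParseFile, ParseFunction, ParseLine, ParsePublicSymbol, ParseFromString) suggest the shape Tokenize(line, separators, max_tokens, &tokens), splitting a writable buffer and reporting whether the expected number of fields came out. A hedged sketch of a call in that style, with the later parameters inferred from the call sites rather than from the truncated declaration:

  #include <vector>
  #include "processor/tokenize.h"  // assumed include path

  void ExampleSplit() {
    // char* (not const char*) in the declaration suggests in-place splitting,
    // so the buffer must be mutable.
    char line[] = "MODULE Linux x86_64 DEADBEEF module.so";
    std::vector<char*> tokens;
    if (!google_breakpad::Tokenize(line, " \r\n", 5, &tokens)) {
      // The line did not split into the expected number of fields.
    }
  }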
/external/perfetto/src/trace_processor/importers/systrace/
systrace_line_tokenizer.h 33 util::Status Tokenize(const std::string& line, SystraceLine*);
systrace_line_tokenizer.cc 49 util::Status SystraceLineTokenizer::Tokenize(const std::string& buffer, in Tokenize() function in perfetto::trace_processor::SystraceLineTokenizer
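Given only the signature shown (a util::Status result plus a SystraceLine out-parameter), a caller would look roughly like the sketch below; default construction of the tokenizer and everything about SystraceLine's contents are assumptions.

  #include <string>
  #include "src/trace_processor/importers/systrace/systrace_line_tokenizer.h"  // path as listed above

  void ParseOneLine(const std::string& raw_line) {
    perfetto::trace_processor::SystraceLineTokenizer tokenizer;  // assumed default-constructible
    perfetto::trace_processor::SystraceLine line;
    auto status = tokenizer.Tokenize(raw_line, &line);  // util::Status, per the declaration above
    if (!status.ok()) {
      // Not a recognizable systrace text line.
    }
  }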
/external/libtextclassifier/native/lang_id/
custom-tokenizer.h 42 void Tokenize(StringPiece text, LightSentence *sentence) const;
custom-tokenizer.cc 102 void TokenizerForLangId::Tokenize(StringPiece text, in Tokenize() function in libtextclassifier3::mobile::lang_id::TokenizerForLangId
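Unlike the annotator tokenizers, this one fills a LightSentence instead of returning a token vector. A shape-only sketch of the calling pattern; the hits confirm just the Tokenize signature, so the tokenizer's construction and the LightSentence API are assumptions:

  // Shape sketch only; construction details and namespace spellings assumed.
  void TokenizeForLangId() {
    TokenizerForLangId tokenizer;
    LightSentence sentence;
    tokenizer.Tokenize("hello world", &sentence);  // sentence now holds the tokens
  }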
/external/tensorflow/tensorflow/lite/testing/
tokenize.h 37 void Tokenize(std::istream* input, TokenProcessor* processor);
message.cc 91 Tokenize(input, &stack); in Read()
tokenize.cc 25 void Tokenize(std::istream* input, TokenProcessor* processor) { in Tokenize() function
tokenize_test.cc 39 Tokenize(&ss, &collector); in TokenizeString()
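tokenize.cc reads a stream and hands each token to a TokenProcessor, and tokenize_test.cc drives it with a collector object. A sketch of that pattern; the TokenProcessor interface itself is not visible in these hits, so the override name below is an assumption, not a verified signature:

  #include <cstdio>
  #include <sstream>
  #include <string>
  #include "tensorflow/lite/testing/tokenize.h"

  // ConsumeToken() is assumed to be TokenProcessor's callback; only Tokenize()
  // appears in the hits above.
  class PrintingProcessor : public tflite::testing::TokenProcessor {
   public:
    void ConsumeToken(std::string* token) override {
      std::printf("token: %s\n", token->c_str());
    }
  };

  void PrintTokens(const std::string& text) {
    std::stringstream input(text);
    PrintingProcessor printer;
    tflite::testing::Tokenize(&input, &printer);
  }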
/external/tensorflow/tensorflow/lite/testing/nnapi_tflite_zip_tests/
tokenize.h 40 void Tokenize(std::istream* input, TokenProcessor* processor);
message.cc 94 Tokenize(input, &stack); in Read()
tokenize.cc 26 void Tokenize(std::istream* input, TokenProcessor* processor) { in Tokenize() function
/external/python/cpython2/Misc/NEWS.d/next/Library/
2018-06-24-01-57-14.bpo-33899.IaOcAr.rst 1 Tokenize module now implicitly emits a NEWLINE when provided with input that
/external/libtextclassifier/native/annotator/
feature-processor.h 110 std::vector<Token> Tokenize(const std::string& text) const;
113 std::vector<Token> Tokenize(const UnicodeText& text_unicode) const;
/external/libtextclassifier/native/annotator/grammar/
grammar-annotator.cc 385 lexer_.Process(text, tokenizer_.Tokenize(text), /*annotations=*/nullptr, in Annotate()
416 lexer_.Process(text, tokenizer_.Tokenize(text), /*annotations=*/nullptr, in SuggestSelection()
449 const std::vector<Token> tokens = tokenizer_.Tokenize(text); in ClassifyText()
/external/chromium-trace/catapult/common/py_utils/py_utils/refactor/
offset_token.py 60 def Tokenize(f): function
/external/perfetto/src/traced/probes/ftrace/kallsyms/
kernel_symbol_map.cc 143 void Tokenize(const char* name, Lambda fn) { in Tokenize() function
260 Tokenize(name, [&tokens, &symbols, addr](base::StringView token) { in Parse()
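Here Tokenize is a file-local template that invokes a lambda once per token rather than materializing a vector, as the Parse() call shows. A generic sketch of that callback-per-token shape (not Perfetto's implementation; the split rule below is illustrative only):

  #include <string_view>

  // Calls `fn` once per underscore-separated token; same shape as the
  // Tokenize(name, lambda) pattern above, but not the Perfetto code itself.
  template <typename Lambda>
  void ForEachToken(const char* name, Lambda fn) {
    const char* start = name;
    for (const char* p = name;; ++p) {
      if (*p == '_' || *p == '\0') {
        if (p > start) fn(std::string_view(start, static_cast<size_t>(p - start)));
        if (*p == '\0') break;
        start = p + 1;
      }
    }
  }

  // Usage: ForEachToken("snd_soc_dapm", [](std::string_view tok) { /* ... */ });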
