
Searched refs:Tokenize (Results 1 – 25 of 96) sorted by relevance


/external/libtextclassifier/native/annotator/duration/
duration_test.cc
133 std::vector<Token> Tokenize(const UnicodeText& text) { in Tokenize() function in libtextclassifier3::__anon5ab10c4f0111::DurationAnnotatorTest
134 return feature_processor_->Tokenize(text); in Tokenize()
194 std::vector<Token> tokens = Tokenize(text); in TEST_F()
214 std::vector<Token> tokens = Tokenize(text); in TEST_F()
226 std::vector<Token> tokens = Tokenize(text); in TEST_F()
246 std::vector<Token> tokens = Tokenize(text); in TEST_F()
266 std::vector<Token> tokens = Tokenize(text); in TEST_F()
286 std::vector<Token> tokens = Tokenize(text); in TEST_F()
306 std::vector<Token> tokens = Tokenize(text); in TEST_F()
326 std::vector<Token> tokens = Tokenize(text); in TEST_F()
[all …]
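The duration_test.cc hits above all use the same fixture pattern: the test class defines a one-line Tokenize() helper that forwards to its FeatureProcessor, so every TEST_F body can call Tokenize(text) directly. A minimal compilable sketch of that shape, with stand-in Token and UnicodeText types (the real ones come from libtextclassifier3 and carry codepoint spans):

#include <sstream>
#include <string>
#include <vector>

// Stand-ins: plain strings are enough to show the shape.
struct Token { std::string value; };
using UnicodeText = std::string;

// Stand-in feature processor that splits on whitespace only.
class FakeFeatureProcessor {
 public:
  std::vector<Token> Tokenize(const UnicodeText& text) const {
    std::vector<Token> tokens;
    std::istringstream stream(text);
    for (std::string word; stream >> word;) tokens.push_back(Token{word});
    return tokens;
  }
};

// The fixture helper seen at duration_test.cc:133-134: forward to the
// processor so test bodies read `Tokenize(text)`.
class DurationAnnotatorTestLike {
 protected:
  std::vector<Token> Tokenize(const UnicodeText& text) {
    return feature_processor_->Tokenize(text);
  }
  FakeFeatureProcessor* feature_processor_ = nullptr;
};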
/external/libtextclassifier/native/utils/
tokenizer_test.cc
94 std::vector<Token> Tokenize(const std::string& utf8_text) const { in Tokenize() function in libtextclassifier3::__anonebae1baa0111::TestingTokenizerProxy
95 return tokenizer_->Tokenize(utf8_text); in Tokenize()
178 std::vector<Token> tokens = tokenizer.Tokenize("Hello world!"); in TEST()
213 EXPECT_THAT(tokenizer.Tokenize("앨라배마 주 전화(123) 456-789웹사이트"), in TEST()
351 tokens = tokenizer.Tokenize( in TEST()
355 tokens = tokenizer.Tokenize("問少目 hello 木輸ยามきゃ"); in TEST()
379 std::vector<Token> tokens = tokenizer.Tokenize("พระบาท สมเด็จ พระ ปร มิ"); in TEST()
400 tokenizer.Tokenize("The interval is: -(12, 138*)"); in TEST()
427 std::vector<Token> tokens = tokenizer.Tokenize("3.1 3﹒2 3.3"); in TEST()
445 std::vector<Token> tokens = tokenizer.Tokenize("พระบาทสมเด็จพระปรมิ"); in TEST()
[all …]
tokenizer.h
94 std::vector<Token> Tokenize(absl::string_view text) const;
97 std::vector<Token> Tokenize(const UnicodeText& text_unicode) const;
bert_tokenizer_test.cc
33 auto results = tokenizer->Tokenize("i'm question"); in AssertTokenizerResults()
69 auto results = tokenizer->Tokenize("i'm questionansweraskask"); in TEST()
112 auto results = tokenizer->Tokenize("i'm questionansweraskask"); in TEST()
tokenizer.cc
99 std::vector<Token> Tokenizer::Tokenize(absl::string_view text) const { in Tokenize() function in libtextclassifier3::Tokenizer
101 return Tokenize(text_unicode); in Tokenize()
104 std::vector<Token> Tokenizer::Tokenize(const UnicodeText& text_unicode) const { in Tokenize() function in libtextclassifier3::Tokenizer
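tokenizer.h (lines 94 and 97) and tokenizer.cc (lines 99-104) together show the two-overload convention: the UTF-8 string entry point converts once to UnicodeText and delegates to the overload that does the real work. A sketch of that delegation with stand-in types; note the real code takes absl::string_view and a UTF-8-aware UnicodeText:

#include <string>
#include <string_view>
#include <vector>

struct Token { std::string value; };  // stand-in for libtextclassifier3's Token
using UnicodeText = std::string;      // stand-in; the real class is UTF-8-aware

class Tokenizer {
 public:
  // UTF-8 entry point: convert once, then reuse the UnicodeText overload,
  // mirroring tokenizer.cc:99-101.
  std::vector<Token> Tokenize(std::string_view text) const {
    UnicodeText text_unicode(text);
    return Tokenize(text_unicode);
  }

  // The overload that does the actual work (tokenizer.cc:104).
  std::vector<Token> Tokenize(const UnicodeText& text_unicode) const {
    return {Token{text_unicode}};  // stand-in: the whole text as one token
  }
};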
/external/libtextclassifier/native/utils/grammar/parsing/
lexer_test.cc
88 std::vector<Token> tokens = tokenizer_.Tokenize("This is a word"); in TEST_F()
97 std::vector<Token> tokens = tokenizer_.Tokenize("1234This a4321cde"); in TEST_F()
107 std::vector<Token> tokens = tokenizer_.Tokenize("10/18/2014"); in TEST_F()
117 std::vector<Token> tokens = tokenizer_.Tokenize("电话:0871—6857(曹"); in TEST_F()
130 std::vector<Token> tokens = tokenizer_.Tokenize("电话 :0871—6857(曹"); in TEST_F()
144 tokenizer_.Tokenize("The.qUIck\n brown2345fox88 \xE2\x80\x94 the"); in TEST_F()
158 std::vector<Token> tokens = tokenizer_.Tokenize("The+2345++the +"); in TEST_F()
/external/icing/icing/tokenization/
verbatim-tokenizer_test.cc
97 auto token_iterator = verbatim_tokenizer->Tokenize(kText).ValueOrDie(); in TEST_F()
110 auto token_iterator = verbatim_tokenizer->Tokenize(kText).ValueOrDie(); in TEST_F()
137 auto token_iterator = verbatim_tokenizer->Tokenize(kText).ValueOrDie(); in TEST_F()
162 auto token_iterator = verbatim_tokenizer->Tokenize(kText).ValueOrDie(); in TEST_F()
182 auto token_iterator = verbatim_tokenizer->Tokenize(kText).ValueOrDie(); in TEST_F()
198 auto token_iterator = verbatim_tokenizer->Tokenize(kText).ValueOrDie(); in TEST_F()
verbatim-tokenizer.cc
127 VerbatimTokenizer::Tokenize(std::string_view text) const { in Tokenize() function in icing::lib::VerbatimTokenizer
134 Tokenize(text)); in TokenizeAll()
plain-tokenizer_test.cc
71 plain_tokenizer->Tokenize(kText)); in TEST_F()
90 plain_tokenizer->Tokenize(kText)); in TEST_F()
347 auto iterator = plain_tokenizer->Tokenize(kText).ValueOrDie(); in TEST_F()
368 auto iterator = plain_tokenizer->Tokenize(kText).ValueOrDie(); in TEST_F()
415 auto iterator = plain_tokenizer->Tokenize(kText).ValueOrDie(); in TEST_F()
469 auto iterator = plain_tokenizer->Tokenize(kText).ValueOrDie(); in TEST_F()
plain-tokenizer.cc
133 PlainTokenizer::Tokenize(std::string_view text) const { in Tokenize() function in icing::lib::PlainTokenizer
143 Tokenize(text)); in TokenizeAll()
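Both icing tokenizers return their token iterator wrapped in a StatusOr: tests unwrap with ValueOrDie(), and TokenizeAll (verbatim-tokenizer.cc:134, plain-tokenizer.cc:143) is layered on Tokenize. A sketch of that calling convention, using absl::StatusOr as a stand-in for icing's libtextclassifier3::StatusOr (which spells the unchecked unwrap ValueOrDie()):

#include <memory>
#include <string>
#include <string_view>
#include <vector>

#include "absl/status/statusor.h"

// Stand-in for icing's Tokenizer::Iterator: yields the whole text once.
class TokenIterator {
 public:
  explicit TokenIterator(std::string_view text) : text_(text) {}
  bool Advance() {
    if (done_) return false;
    done_ = true;
    return true;
  }
  std::string_view GetToken() const { return text_; }

 private:
  std::string_view text_;
  bool done_ = false;
};

class PlainTokenizerLike {
 public:
  // Tokenize can fail (e.g. on invalid input), hence the StatusOr wrapper.
  absl::StatusOr<std::unique_ptr<TokenIterator>> Tokenize(
      std::string_view text) const {
    return std::make_unique<TokenIterator>(text);
  }

  // TokenizeAll layered on Tokenize, as at plain-tokenizer.cc:143.
  absl::StatusOr<std::vector<std::string>> TokenizeAll(
      std::string_view text) const {
    auto iterator_or = Tokenize(text);
    if (!iterator_or.ok()) return iterator_or.status();
    std::vector<std::string> tokens;
    while ((*iterator_or)->Advance()) {
      tokens.emplace_back((*iterator_or)->GetToken());
    }
    return tokens;
  }
};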
/external/icing/icing/util/
tokenized-document.cc
37 libtextclassifier3::StatusOr<std::vector<TokenizedSection>> Tokenize( in Tokenize() function
49 tokenizer->Tokenize(subcontent)); in Tokenize()
82 Tokenize(schema_store, language_segmenter, in Create()
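tokenized-document.cc applies the same convention one level up: a free Tokenize() function (line 37) runs the section tokenizer over each content string (line 49) and aggregates TokenizedSections, which Create() then consumes (line 82). A self-contained sketch of that aggregation, again with absl::StatusOr standing in for icing's status type and a hypothetical TokenizeString() as the per-string tokenizer:

#include <string>
#include <utility>
#include <vector>

#include "absl/status/statusor.h"

// Stand-in for icing's TokenizedSection: the tokens of one document section.
struct TokenizedSection { std::vector<std::string> token_sequence; };

// Hypothetical per-string tokenizer; stands in for tokenizer->Tokenize(...).
absl::StatusOr<std::vector<std::string>> TokenizeString(const std::string& s) {
  return std::vector<std::string>{s};  // one token per string, for the sketch
}

// Shape of the free Tokenize(): tokenize every section's content string,
// propagate the first error, and collect the resulting sections.
absl::StatusOr<std::vector<TokenizedSection>> Tokenize(
    const std::vector<std::string>& section_contents) {
  std::vector<TokenizedSection> sections;
  for (const std::string& subcontent : section_contents) {
    auto tokens_or = TokenizeString(subcontent);
    if (!tokens_or.ok()) return tokens_or.status();
    sections.push_back(TokenizedSection{*std::move(tokens_or)});
  }
  return sections;
}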
/external/perfetto/src/trace_processor/importers/proto/
proto_trace_parser_unittest.cc
284 util::Status Tokenize() { in Tokenize() function in perfetto::trace_processor::__anon06099b9a0111::ProtoTraceParserTest
352 Tokenize(); in TEST_F()
384 Tokenize(); in TEST_F()
435 Tokenize(); in TEST_F()
503 Tokenize(); in TEST_F()
549 Tokenize(); in TEST_F()
572 Tokenize(); in TEST_F()
592 Tokenize(); in TEST_F()
607 Tokenize(); in TEST_F()
625 Tokenize(); in TEST_F()
[all …]
/external/perfetto/src/trace_processor/importers/fuchsia/
fuchsia_parser_unittest.cc
275 util::Status Tokenize() { in Tokenize() function in perfetto::trace_processor::__anon92d29aca0111::FuchsiaTraceParserTest
305 EXPECT_FALSE(Tokenize().ok()); in TEST_F()
332 EXPECT_TRUE(Tokenize().ok()); in TEST_F()
370 EXPECT_TRUE(Tokenize().ok()); in TEST_F()
480 auto status = Tokenize(); in TEST_F()
534 EXPECT_TRUE(Tokenize().ok()); in TEST_F()
605 EXPECT_TRUE(Tokenize().ok()); in TEST_F()
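The two perfetto parser unittests share a fixture idiom: a Tokenize() member that pushes the buffered trace bytes through the tokenizer and returns util::Status, so happy-path tests assert Tokenize().ok() and malformed-input tests assert the opposite (fuchsia_parser_unittest.cc:305). A sketch of the idiom, with a stand-in Status in place of perfetto's util::Status:

#include <cstdint>
#include <string>
#include <utility>
#include <vector>

// Stand-in for perfetto's util::Status: ok() plus an error message.
class Status {
 public:
  Status() = default;  // success
  explicit Status(std::string msg) : msg_(std::move(msg)) {}
  bool ok() const { return msg_.empty(); }
  const std::string& message() const { return msg_; }

 private:
  std::string msg_;
};

// Fixture idiom: tests append raw trace bytes, then call Tokenize() once and
// check the returned Status instead of repeating the plumbing in every test.
class ParserTestLike {
 public:
  void PushBytes(std::vector<uint8_t> bytes) { trace_ = std::move(bytes); }

  Status Tokenize() {
    if (trace_.empty()) return Status("empty trace");
    // A real fixture hands trace_ to the trace processor's tokenizer here.
    return Status();
  }

 private:
  std::vector<uint8_t> trace_;
};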
/external/perfetto/src/trace_processor/util/
streaming_line_reader.cc
42 size_t consumed = Tokenize(base::StringView(buf_.data(), buf_.size())); in EndWrite()
51 size_t StreamingLineReader::Tokenize(base::StringView input) { in Tokenize() function in perfetto::trace_processor::util::StreamingLineReader
streaming_line_reader_unittest.cc
56 TEST(StreamingLineReaderTest, Tokenize) { in TEST() argument
60 slr.Tokenize("a12\nb3456\nc\nd78\n\ne12\nf3456\n"); in TEST()
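StreamingLineReader::Tokenize has a streaming contract worth noting: it takes the current buffer as a StringView, emits every complete newline-terminated line, and returns the number of bytes consumed, so EndWrite() (streaming_line_reader.cc:42) can keep the unterminated tail for the next chunk. A stand-alone sketch of that contract:

#include <cstddef>
#include <string>
#include <string_view>
#include <vector>

// Emit every complete '\n'-terminated line from `input` and return the byte
// count consumed; whatever remains is an incomplete line the caller retains.
size_t TokenizeLines(std::string_view input, std::vector<std::string>* lines) {
  size_t consumed = 0;
  size_t pos;
  while ((pos = input.find('\n')) != std::string_view::npos) {
    lines->emplace_back(input.substr(0, pos));
    input.remove_prefix(pos + 1);
    consumed += pos + 1;
  }
  return consumed;
}

With input "a12\nb3456\nc" this emits "a12" and "b3456" and returns 10, leaving "c" buffered, which matches the partial-write pattern exercised by the unittest above.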
/external/pigweed/pw_tokenizer/size_report/
BUILD.gn
35 # Tokenize string size report executable.
54 # Tokenize string expression size report executable.
BUILD.bazel
38 # Tokenize string size report binary.
59 # Tokenize string expression size report binary.
/external/licenseclassifier/stringclassifier/searchset/tokenizer/
tokenizer_test.go
62 if got := Tokenize(tt.text); !reflect.DeepEqual(got, tt.want) {
110 toks := Tokenize(tt.text)
/external/tflite-support/tensorflow_lite_support/ios/text/tokenizers/Sources/
TFLTokenizerUtil.mm
21 NSArray<NSString *> *Tokenize(Tokenizer *tokenizer, NSString *input) { function
22 TokenizerResult tokenize_result = tokenizer->Tokenize(MakeString(input));
/external/tflite-support/tensorflow_lite_support/cc/test/text/
regex_tokenizer_test.cc
48 auto results = tokenizer->Tokenize("good morning, i'm your teacher.\n"); in TEST()
59 auto results = tokenizer->Tokenize("good morning, i'm your teacher.\n"); in TEST()
/external/google-breakpad/src/processor/
basic_source_line_resolver.cc
79 if (!Tokenize(line, separators, max_tokens - 1, tokens)) { in TokenizeWithOptionalField()
87 if (!Tokenize(tokens->back(), separators, 2, &last_tokens)) { in TokenizeWithOptionalField()
513 if (!Tokenize(file_line, kWhitespace, 2, &tokens)) { in ParseFile()
576 if (!Tokenize(line_line, kWhitespace, 4, &tokens)) { in ParseLine()
tokenize.h
53 bool Tokenize(char *line,
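tokenize.h declares the classic in-place splitter: Tokenize() mutates the line, fills a vector of char* tokens, and returns whether the expected count was found; TokenizeWithOptionalField (basic_source_line_resolver.cc:79-87) exploits the bool result by retrying with max_tokens - 1 when an optional field is absent. A stand-in implementation consistent with that declaration (the real one also packs the remainder of the line into the final token, which this sketch omits):

#include <cstring>
#include <vector>

// Split `line` in place at any of `separators` (strtok_r is POSIX),
// collecting at most `max_tokens` pointers into `tokens`; report whether
// exactly that many fields were found so callers can retry with fewer.
bool Tokenize(char* line, const char* separators, int max_tokens,
              std::vector<char*>* tokens) {
  tokens->clear();
  char* saveptr = nullptr;
  for (char* token = strtok_r(line, separators, &saveptr);
       token != nullptr && static_cast<int>(tokens->size()) < max_tokens;
       token = strtok_r(nullptr, separators, &saveptr)) {
    tokens->push_back(token);
  }
  return static_cast<int>(tokens->size()) == max_tokens;
}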
/external/libtextclassifier/native/annotator/
feature-processor_test.cc
383 std::vector<Token> tokens = feature_processor.Tokenize("one, two, three"); in TEST_F()
424 tokens = feature_processor3.Tokenize("zero, one, two, three, four"); in TEST_F()
457 std::vector<Token> tokens = feature_processor.Tokenize("one, two, three"); in TEST_F()
498 tokens = feature_processor3.Tokenize("zero, one, two, three, four"); in TEST_F()
629 {0, 3}, feature_processor.Tokenize("aaa bbb ccc")), in TEST_F()
632 {0, 3}, feature_processor.Tokenize("aaa bbb ěěě")), in TEST_F()
635 {0, 3}, feature_processor.Tokenize("ěěě řřř ěěě")), in TEST_F()
638 {0, 0}, feature_processor.Tokenize("")), in TEST_F()
/external/perfetto/src/trace_processor/importers/systrace/
systrace_line_tokenizer.h
33 util::Status Tokenize(const std::string& line, SystraceLine*);
/external/libtextclassifier/native/lang_id/
custom-tokenizer.h
42 void Tokenize(StringPiece text, LightSentence *sentence) const;
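The last two headers show the output-parameter style: systrace's Tokenize(const std::string&, SystraceLine*) and lang_id's Tokenize(StringPiece, LightSentence*) both fill a caller-owned structure rather than returning a token vector, which lets callers reuse the output object across inputs. A sketch of the lang_id shape, with stand-ins for StringPiece and LightSentence:

#include <string>
#include <string_view>
#include <vector>

using StringPiece = std::string_view;  // stand-in for the library's view type
struct LightSentence { std::vector<std::string> words; };  // stand-in

// Fill the caller-owned sentence instead of returning a new container,
// splitting on single spaces for the purposes of the sketch.
void Tokenize(StringPiece text, LightSentence* sentence) {
  size_t start = 0;
  while (start < text.size()) {
    size_t end = text.find(' ', start);
    if (end == StringPiece::npos) end = text.size();
    if (end > start) {
      sentence->words.emplace_back(text.substr(start, end - start));
    }
    start = end + 1;
  }
}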
