Home
last modified time | relevance | path

Searched refs: Tokenize (Results 1 – 25 of 82) sorted by relevance

Pages: 1 2 3 4

/external/libtextclassifier/native/annotator/duration/
Dduration_test.cc112 std::vector<Token> Tokenize(const UnicodeText& text) { in Tokenize() function in libtextclassifier3::__anoncdfecbf10111::DurationAnnotatorTest
113 return feature_processor_->Tokenize(text); in Tokenize()
152 std::vector<Token> tokens = Tokenize(text); in TEST_F()
171 std::vector<Token> tokens = Tokenize(text); in TEST_F()
190 std::vector<Token> tokens = Tokenize(text); in TEST_F()
209 std::vector<Token> tokens = Tokenize(text); in TEST_F()
228 std::vector<Token> tokens = Tokenize(text); in TEST_F()
247 std::vector<Token> tokens = Tokenize(text); in TEST_F()
266 std::vector<Token> tokens = Tokenize(text); in TEST_F()
286 std::vector<Token> tokens = Tokenize(text); in TEST_F()
[all …]
/external/libtextclassifier/native/utils/
Dtokenizer_test.cc94 std::vector<Token> Tokenize(const std::string& utf8_text) const { in Tokenize() function in libtextclassifier3::__anon5b5dd84c0111::TestingTokenizerProxy
95 return tokenizer_->Tokenize(utf8_text); in Tokenize()
178 std::vector<Token> tokens = tokenizer.Tokenize("Hello world!"); in TEST()
213 EXPECT_THAT(tokenizer.Tokenize("앨라배마 주 전화(123) 456-789웹사이트"), in TEST()
351 tokens = tokenizer.Tokenize( in TEST()
355 tokens = tokenizer.Tokenize("問少目 hello 木輸ยามきゃ"); in TEST()
379 std::vector<Token> tokens = tokenizer.Tokenize("พระบาท สมเด็จ พระ ปร มิ"); in TEST()
400 tokenizer.Tokenize("The interval is: -(12, 138*)"); in TEST()
427 std::vector<Token> tokens = tokenizer.Tokenize("3.1 3﹒2 3.3"); in TEST()
445 std::vector<Token> tokens = tokenizer.Tokenize("พระบาทสมเด็จพระปรมิ"); in TEST()
[all …]
Dbert_tokenizer.cc64 return BertTokenizer::Tokenize(tokens); in TokenizeSingleToken()
67 TokenizerResult BertTokenizer::Tokenize(const std::string& input) { in Tokenize() function in libtextclassifier3::BertTokenizer
69 return BertTokenizer::Tokenize(tokens); in Tokenize()
72 TokenizerResult BertTokenizer::Tokenize( in Tokenize() function in libtextclassifier3::BertTokenizer
Dbert_tokenizer_test.cc31 auto results = tokenizer->Tokenize("i'm question"); in AssertTokenizerResults()
67 auto results = tokenizer->Tokenize("i'm questionansweraskask"); in TEST()
81 auto results = tokenizer->Tokenize("i'm questionansweraskask"); in TEST()
Dtokenizer.h93 std::vector<Token> Tokenize(const std::string& text) const;
96 std::vector<Token> Tokenize(const UnicodeText& text_unicode) const;
Dbert_tokenizer.h105 TokenizerResult Tokenize(const std::string& input) override;
112 TokenizerResult Tokenize(const std::vector<std::string>& tokens);
Dtokenizer.cc97 std::vector<Token> Tokenizer::Tokenize(const std::string& text) const { in Tokenize() function in libtextclassifier3::Tokenizer
99 return Tokenize(text_unicode); in Tokenize()
102 std::vector<Token> Tokenizer::Tokenize(const UnicodeText& text_unicode) const { in Tokenize() function in libtextclassifier3::Tokenizer
/external/libtextclassifier/native/utils/grammar/parsing/
Dlexer_test.cc88 std::vector<Token> tokens = tokenizer_.Tokenize("This is a word"); in TEST_F()
97 std::vector<Token> tokens = tokenizer_.Tokenize("1234This a4321cde"); in TEST_F()
107 std::vector<Token> tokens = tokenizer_.Tokenize("10/18/2014"); in TEST_F()
117 std::vector<Token> tokens = tokenizer_.Tokenize("电话:0871—6857(曹"); in TEST_F()
130 std::vector<Token> tokens = tokenizer_.Tokenize("电话 :0871—6857(曹"); in TEST_F()
144 tokenizer_.Tokenize("The.qUIck\n brown2345fox88 \xE2\x80\x94 the"); in TEST_F()
158 std::vector<Token> tokens = tokenizer_.Tokenize("The+2345++the +"); in TEST_F()
/external/icing/icing/util/
Dtokenized-document.cc39 tokenized_document.Tokenize(schema_store, language_segmenter)); in Create()
46 libtextclassifier3::Status TokenizedDocument::Tokenize( in Tokenize() function in icing::lib::TokenizedDocument
61 tokenizer->Tokenize(subcontent)); in Tokenize()
/external/perfetto/src/trace_processor/importers/proto/
Dproto_trace_parser_unittest.cc266 util::Status Tokenize() { in Tokenize() function in perfetto::trace_processor::__anonde9b72fc0111::ProtoTraceParserTest
335 Tokenize(); in TEST_F()
366 Tokenize(); in TEST_F()
416 Tokenize(); in TEST_F()
483 Tokenize(); in TEST_F()
528 Tokenize(); in TEST_F()
550 Tokenize(); in TEST_F()
569 Tokenize(); in TEST_F()
583 Tokenize(); in TEST_F()
600 Tokenize(); in TEST_F()
[all …]
/external/icing/icing/tokenization/
Dplain-tokenizer.cc128 PlainTokenizer::Tokenize(std::string_view text) const { in Tokenize() function in icing::lib::PlainTokenizer
138 Tokenize(text)); in TokenizeAll()
Dplain-tokenizer_test.cc295 auto iterator = plain_tokenizer->Tokenize(kText).ValueOrDie(); in TEST_F()
315 auto iterator = plain_tokenizer->Tokenize(kText).ValueOrDie(); in TEST_F()
360 auto iterator = plain_tokenizer->Tokenize(kText).ValueOrDie(); in TEST_F()
411 auto iterator = plain_tokenizer->Tokenize(kText).ValueOrDie(); in TEST_F()
Dplain-tokenizer.h35 libtextclassifier3::StatusOr<std::unique_ptr<Tokenizer::Iterator>> Tokenize(
Draw-query-tokenizer.h35 libtextclassifier3::StatusOr<std::unique_ptr<Tokenizer::Iterator>> Tokenize(
/external/tflite-support/tensorflow_lite_support/ios/text/tokenizers/Sources/
DTFLTokenizerUtil.mm21 NSArray<NSString *> *Tokenize(Tokenizer *tokenizer, NSString *input) { function
22 TokenizerResult tokenize_result = tokenizer->Tokenize(MakeString(input));
DTFLTokenizerUtil.h28 NSArray<NSString *> *Tokenize(Tokenizer *tokenizer, NSString *input);
/external/google-breakpad/src/processor/
Dtokenize.h53 bool Tokenize(char *line,
Dbasic_source_line_resolver.cc79 if (!Tokenize(line, separators, max_tokens - 1, tokens)) { in TokenizeWithOptionalField()
87 if (!Tokenize(tokens->back(), separators, 2, &last_tokens)) { in TokenizeWithOptionalField()
513 if (!Tokenize(file_line, kWhitespace, 2, &tokens)) { in ParseFile()
576 if (!Tokenize(line_line, kWhitespace, 4, &tokens)) { in ParseLine()
/external/perfetto/src/trace_processor/importers/systrace/
Dsystrace_line_tokenizer.h33 util::Status Tokenize(const std::string& line, SystraceLine*);
/external/libtextclassifier/native/lang_id/
Dcustom-tokenizer.h42 void Tokenize(StringPiece text, LightSentence *sentence) const;
/external/google-breakpad/src/tools/windows/converter_exe/
Dtokenizer.h45 static void Tokenize(const string &delimiters, const string &input,
/external/tensorflow/tensorflow/lite/testing/nnapi_tflite_zip_tests/
Dtokenize.h40 void Tokenize(std::istream* input, TokenProcessor* processor);
/external/tensorflow/tensorflow/lite/testing/
Dtokenize.h37 void Tokenize(std::istream* input, TokenProcessor* processor);
/external/python/cpython2/Misc/NEWS.d/next/Library/
D2018-06-24-01-57-14.bpo-33899.IaOcAr.rst1 Tokenize module now implicitly emits a NEWLINE when provided with input that
/external/tflite-support/tensorflow_lite_support/cc/text/tokenizers/
Dtokenizer.h38 virtual TokenizerResult Tokenize(const std::string& input) = 0;

Pages: 1 2 3 4