| /external/tflite-support/tensorflow_lite_support/ios/text/tokenizers/Sources/ |
| D | TFLTokenizerUtil.mm | 21 NSArray<NSString *> *Tokenize(Tokenizer *tokenizer, NSString *input) { function
|
| /external/google-breakpad/src/tools/windows/converter_exe/ |
| D | tokenizer.cc | 36 void Tokenizer::Tokenize(const string &delimiters, const string &input, in Tokenize() function in crash::Tokenizer
|
| /external/google-breakpad/src/processor/ |
| D | tokenize.cc | 45 bool Tokenize(char *line, in Tokenize() function
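The breakpad entry above tokenizes a mutable char buffer in place (its full parameter list is truncated in this listing). Below is a standalone sketch of that pattern, not breakpad's actual signature: delimiters are overwritten with '\0' and the output vector holds pointers into the caller's buffer.

```cpp
// Generic in-place line tokenizer sketch; parameter names and the
// "return true when exactly max_tokens were found" contract are my own
// illustration, not the breakpad API.
#include <cstdio>
#include <cstring>
#include <vector>

bool TokenizeInPlace(char* line, const char* separators, int max_tokens,
                     std::vector<char*>* tokens) {
  tokens->clear();
  for (char* piece = std::strtok(line, separators);
       piece != nullptr && static_cast<int>(tokens->size()) < max_tokens;
       piece = std::strtok(nullptr, separators)) {
    tokens->push_back(piece);  // points into the caller's buffer
  }
  return static_cast<int>(tokens->size()) == max_tokens;
}

int main() {
  char line[] = "MODULE Linux x86_64 0123456789ABCDEF foo.so";
  std::vector<char*> tokens;
  if (TokenizeInPlace(line, " ", 5, &tokens)) {
    for (char* t : tokens) std::printf("%s\n", t);
  }
}
```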
|
| /external/tflite-support/tensorflow_lite_support/cc/text/tokenizers/ |
| D | sentencepiece_tokenizer.h | 46 TokenizerResult Tokenize(const std::string& input) override { in Tokenize() function
|
| D | bert_tokenizer.cc | 58 TokenizerResult BertTokenizer::Tokenize(const std::string& input) { in Tokenize() function in tflite::support::text::tokenizer::BertTokenizer
|
| D | regex_tokenizer.cc | 60 TokenizerResult RegexTokenizer::Tokenize(const std::string& input) { in Tokenize() function in tflite::support::text::tokenizer::RegexTokenizer
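The three tflite-support tokenizers above share the same interface: `TokenizerResult Tokenize(const std::string& input)`. A rough usage sketch follows; the vocab-path constructor and the `subwords` field of `TokenizerResult` are assumptions based on this listing and memory, so verify them against the headers before use.

```cpp
// Minimal sketch of calling a tflite-support tokenizer; constructor form
// and the result's `subwords` member are assumptions, not verified API.
#include <iostream>
#include <string>

#include "tensorflow_lite_support/cc/text/tokenizers/bert_tokenizer.h"

int main() {
  using ::tflite::support::text::tokenizer::BertTokenizer;
  using ::tflite::support::text::tokenizer::TokenizerResult;

  BertTokenizer tokenizer("/path/to/vocab.txt");  // assumed constructor
  TokenizerResult result = tokenizer.Tokenize("hello tokenized world");
  for (const std::string& piece : result.subwords) {  // assumed field name
    std::cout << piece << "\n";
  }
}
```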
|
| /external/perfetto/src/trace_processor/util/ |
| D | streaming_line_reader.cc | 51 size_t StreamingLineReader::Tokenize(base::StringView input) { in Tokenize() function in perfetto::trace_processor::util::StreamingLineReader
|
| D | streaming_line_reader_unittest.cc | 56 TEST(StreamingLineReaderTest, Tokenize) { in TEST() argument
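The perfetto StreamingLineReader entry suggests a "tokenize what you can, report how many bytes you consumed" contract, so the caller can buffer a trailing partial line and retry on the next chunk. Here is a standalone sketch of that pattern with std::string_view; it is an illustration, not perfetto's implementation.

```cpp
// Calls `on_line` for every complete '\n'-terminated line in `input` and
// returns the number of bytes consumed; the caller keeps the unconsumed
// tail and prepends it to the next chunk.
#include <cstdio>
#include <functional>
#include <string_view>

size_t TokenizeLines(std::string_view input,
                     const std::function<void(std::string_view)>& on_line) {
  size_t consumed = 0;
  for (;;) {
    size_t newline = input.find('\n', consumed);
    if (newline == std::string_view::npos) break;
    on_line(input.substr(consumed, newline - consumed));
    consumed = newline + 1;
  }
  return consumed;
}

int main() {
  std::string_view chunk = "first line\nsecond line\npartial";
  size_t used = TokenizeLines(chunk, [](std::string_view line) {
    std::printf("line: %.*s\n", static_cast<int>(line.size()), line.data());
  });
  std::printf("consumed %zu of %zu bytes\n", used, chunk.size());
}
```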
|
| /external/icing/icing/util/ |
| D | tokenized-document.cc | 37 libtextclassifier3::StatusOr<std::vector<TokenizedSection>> Tokenize( in Tokenize() function
|
| /external/perfetto/src/trace_processor/importers/systrace/ |
| D | systrace_line_tokenizer.cc | 49 util::Status SystraceLineTokenizer::Tokenize(const std::string& buffer, in Tokenize() function in perfetto::trace_processor::SystraceLineTokenizer
|
| /external/chromium-trace/catapult/common/py_utils/py_utils/refactor/ |
| D | offset_token.py | 60 def Tokenize(f): function
|
| /external/libtextclassifier/native/lang_id/ |
| D | custom-tokenizer.cc | 102 void TokenizerForLangId::Tokenize(StringPiece text, in Tokenize() function in libtextclassifier3::mobile::lang_id::TokenizerForLangId
|
| /external/icing/icing/tokenization/ |
| D | verbatim-tokenizer.cc | 127 VerbatimTokenizer::Tokenize(std::string_view text) const { in Tokenize() function in icing::lib::VerbatimTokenizer
|
| D | plain-tokenizer.cc | 133 PlainTokenizer::Tokenize(std::string_view text) const { in Tokenize() function in icing::lib::PlainTokenizer
|
| D | rfc822-tokenizer.cc | 781 Rfc822Tokenizer::Tokenize(std::string_view text) const { in Tokenize() function in icing::lib::Rfc822Tokenizer
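The icing tokenizers above all take a `std::string_view` (their return types are cut off in this listing). The sketch below only shows the general "plain tokenizer" idea behind entries like plain-tokenizer.cc: split text into alphanumeric runs and remember each run's offset. It is my own illustration, not icing's API.

```cpp
// Standalone plain-tokenizer sketch: alphanumeric runs with byte offsets.
#include <cctype>
#include <cstdio>
#include <string_view>
#include <vector>

struct PlainToken {
  std::string_view text;  // view into the original input
  size_t offset;          // byte offset of the token in the input
};

std::vector<PlainToken> TokenizePlain(std::string_view text) {
  std::vector<PlainToken> tokens;
  size_t i = 0;
  while (i < text.size()) {
    while (i < text.size() && !std::isalnum(static_cast<unsigned char>(text[i]))) ++i;
    size_t start = i;
    while (i < text.size() && std::isalnum(static_cast<unsigned char>(text[i]))) ++i;
    if (i > start) tokens.push_back({text.substr(start, i - start), start});
  }
  return tokens;
}

int main() {
  for (const PlainToken& t : TokenizePlain("foo, bar-42 baz")) {
    std::printf("%zu: %.*s\n", t.offset, static_cast<int>(t.text.size()),
                t.text.data());
  }
}
```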
|
| /external/libtextclassifier/native/utils/ |
| D | tokenizer.cc | 99 std::vector<Token> Tokenizer::Tokenize(absl::string_view text) const { in Tokenize() function in libtextclassifier3::Tokenizer
| D | tokenizer.cc | 104 std::vector<Token> Tokenizer::Tokenize(const UnicodeText& text_unicode) const { in Tokenize() function in libtextclassifier3::Tokenizer
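Both overloads above return `std::vector<Token>`. The helper below just shows how a caller might walk that result; the Token fields `value`, `start` and `end` are assumptions from memory, and constructing a `libtextclassifier3::Tokenizer` (options flatbuffer plus UniLib) is omitted.

```cpp
// Sketch of consuming Tokenizer::Tokenize() output; field names are assumed.
#include <iostream>

#include "absl/strings/string_view.h"
#include "utils/tokenizer.h"  // assumed include path within native/

void PrintTokens(const libtextclassifier3::Tokenizer& tokenizer,
                 absl::string_view text) {
  for (const libtextclassifier3::Token& token : tokenizer.Tokenize(text)) {
    // `value` is the token text, `start`/`end` its codepoint span (assumed).
    std::cout << token.value << " [" << token.start << ", " << token.end
              << ")\n";
  }
}
```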
|
| D | bert_tokenizer.cc | 76 TokenizerResult BertTokenizer::Tokenize(const std::string& input) { in Tokenize() function in libtextclassifier3::BertTokenizer
|
| /external/tensorflow/tensorflow/lite/testing/nnapi_tflite_zip_tests/ |
| D | tokenize.cc | 26 void Tokenize(std::istream* input, TokenProcessor* processor) { in Tokenize() function
|
| /external/tensorflow/tensorflow/lite/testing/ |
| D | tokenize.cc | 25 void Tokenize(std::istream* input, TokenProcessor* processor) { in Tokenize() function
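The two tensorflow/lite/testing entries (the nnapi copy is identical) use a push-style callback: `Tokenize()` reads from a stream and hands each token to a `TokenProcessor`. A collector like the one below is the usual way to drive it; the `tflite::testing` namespace and the `ConsumeToken(std::string*)` method name are assumptions, so check tokenize.h before relying on them.

```cpp
// Sketch of driving the callback-style Tokenize(); method and namespace
// names are assumptions based on how the header is typically written.
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

#include "tensorflow/lite/testing/tokenize.h"

class CollectingProcessor : public tflite::testing::TokenProcessor {
 public:
  void ConsumeToken(std::string* token) override { tokens_.push_back(*token); }
  const std::vector<std::string>& tokens() const { return tokens_; }

 private:
  std::vector<std::string> tokens_;
};

int main() {
  std::istringstream input("load_model: \"model.tflite\" { invoke }");
  CollectingProcessor processor;
  tflite::testing::Tokenize(&input, &processor);
  for (const std::string& token : processor.tokens()) std::cout << token << "\n";
}
```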
|
| /external/licenseclassifier/stringclassifier/searchset/tokenizer/ |
| D | tokenizer.go | 44 func Tokenize(s string) (toks Tokens) { func
|
| /external/perfetto/src/trace_processor/importers/proto/ |
| D | proto_trace_tokenizer.h | 45 util::Status Tokenize(TraceBlobView blob, Callback callback) { in Tokenize() function
|
| /external/tflite-support/tensorflow_lite_support/custom_ops/kernel/ |
| D | whitespace_tokenizer.cc | 60 std::vector<std::pair<const char*, int>> Tokenize(StringRef str) { in Tokenize() function
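The custom-op entry above returns (pointer, length) pairs into the original string rather than copies, which is what a TFLite kernel wants when it rebuilds a string tensor. A minimal standalone version of that idea, using std::string_view in place of TFLite's StringRef, looks like this; it is an illustration, not the kernel's code.

```cpp
// Whitespace tokenizer sketch producing (pointer, length) views into the input.
#include <cstdio>
#include <string_view>
#include <utility>
#include <vector>

std::vector<std::pair<const char*, int>> TokenizeWhitespace(std::string_view str) {
  std::vector<std::pair<const char*, int>> tokens;
  size_t i = 0;
  while (i < str.size()) {
    while (i < str.size() && str[i] == ' ') ++i;  // skip runs of spaces
    size_t start = i;
    while (i < str.size() && str[i] != ' ') ++i;  // consume one token
    if (i > start)
      tokens.emplace_back(str.data() + start, static_cast<int>(i - start));
  }
  return tokens;
}

int main() {
  for (const auto& [ptr, len] : TokenizeWhitespace("  split on   spaces ")) {
    std::printf("%.*s\n", len, ptr);
  }
}
```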
|
| /external/libtextclassifier/native/utils/tflite/ |
| D | string_projection.cc | 278 std::vector<std::string> Tokenize(const std::string& input, size_t max_input, in Tokenize() function in tflite::ops::custom::libtextclassifier3::string_projection::__anondff7419a0111::ProjectionTokenizer
| D | string_projection.cc | 283 std::vector<std::string> Tokenize(const char* input_ptr, size_t len, in Tokenize() function in tflite::ops::custom::libtextclassifier3::string_projection::__anondff7419a0111::ProjectionTokenizer
|
| /external/libtextclassifier/native/annotator/duration/ |
| D | duration_test.cc | 133 std::vector<Token> Tokenize(const UnicodeText& text) { in Tokenize() function in libtextclassifier3::__anon5ab10c4f0111::DurationAnnotatorTest
| D | duration_test.cc | 601 std::vector<Token> Tokenize(const UnicodeText& text) { in Tokenize() function in libtextclassifier3::__anon5ab10c4f0111::JapaneseDurationAnnotatorTest
|
| /external/perfetto/src/trace_processor/importers/fuchsia/ |
| D | fuchsia_parser_unittest.cc | 275 util::Status Tokenize() { in Tokenize() function in perfetto::trace_processor::__anon92d29aca0111::FuchsiaTraceParserTest
|