| /external/google-breakpad/src/processor/ |
| D | tokenize.cc | 45 bool Tokenize(char *line, in Tokenize() function
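The breakpad hit is a C-style splitter that tokenizes a symbol-file line in place. The listing cuts the signature off after `char *line`, so everything after that parameter in the sketch below (separator set, token limit, output vector of pointers) is an assumption for illustration, and the body is a stand-in rather than breakpad's actual code.

```cpp
#include <cstring>
#include <iostream>
#include <string>
#include <vector>

// Stand-in with a breakpad-like shape: split `line` in place on `separators`,
// collecting up to `max_tokens` pointers into the caller's vector. Only the
// first parameter comes from the listing; the rest are assumed for this sketch.
bool Tokenize(char* line, const char* separators, int max_tokens,
              std::vector<char*>* tokens) {
  tokens->clear();
  char* save = nullptr;
  // strtok_r (POSIX) writes '\0' over separators, so `line` must be writable.
  for (char* tok = strtok_r(line, separators, &save);
       tok != nullptr && static_cast<int>(tokens->size()) < max_tokens;
       tok = strtok_r(nullptr, separators, &save)) {
    tokens->push_back(tok);
  }
  return !tokens->empty();
}

int main() {
  std::string buffer = "FUNC 1000 54 0 main";  // writable copy; split in place
  std::vector<char*> tokens;
  if (Tokenize(&buffer[0], " ", 8, &tokens)) {
    for (char* t : tokens) std::cout << t << "\n";
  }
}
```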
|
| /external/libtextclassifier/utils/ |
| D | tokenizer.cc | 90 std::vector<Token> Tokenizer::Tokenize(const std::string& text) const { in Tokenize() function in libtextclassifier3::Tokenizer
| D | tokenizer.cc | 95 std::vector<Token> Tokenizer::Tokenize(const UnicodeText& text_unicode) const { in Tokenize() function in libtextclassifier3::Tokenizer
|
| D | tokenizer_test.cc | 92 std::vector<Token> Tokenize(const std::string& utf8_text) const { in Tokenize() function in libtextclassifier3::__anonfe576b980111::TestingTokenizerProxy
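Both libtextclassifier3::Tokenizer overloads above (and the test proxy) return a flat std::vector<Token>, one entry per token with its span. The toy below mirrors only that return shape, using whitespace splitting and byte offsets; the real Tokenizer is Unicode-aware and configuration-driven, so treat this purely as an illustration of the calling pattern, with Token fields assumed.

```cpp
#include <iostream>
#include <string>
#include <vector>

// Toy stand-ins for the libtextclassifier3 types named in the listing. The
// real Token carries codepoint spans; here the fields are assumed and the
// offsets are plain byte positions.
struct Token {
  std::string value;
  int start;  // byte offset where the token begins
  int end;    // byte offset one past the last byte of the token
};

class Tokenizer {
 public:
  // Mirrors the "std::vector<Token> Tokenize(const std::string&)" shape from
  // the listing, splitting on single spaces only.
  std::vector<Token> Tokenize(const std::string& text) const {
    std::vector<Token> tokens;
    int start = -1;
    for (int pos = 0; pos <= static_cast<int>(text.size()); ++pos) {
      const bool at_break =
          pos == static_cast<int>(text.size()) || text[pos] == ' ';
      if (at_break && start >= 0) {
        tokens.push_back({text.substr(start, pos - start), start, pos});
        start = -1;
      } else if (!at_break && start < 0) {
        start = pos;
      }
    }
    return tokens;
  }
};

int main() {
  Tokenizer tokenizer;
  for (const Token& t : tokenizer.Tokenize("hello world")) {
    std::cout << "'" << t.value << "' [" << t.start << ", " << t.end << ")\n";
  }
}
```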
|
| /external/libtextclassifier/lang_id/ |
| D | custom-tokenizer.cc | 102 void TokenizerForLangId::Tokenize(StringPiece text, in Tokenize() function in libtextclassifier3::mobile::lang_id::TokenizerForLangId
|
| /external/chromium-trace/catapult/common/py_utils/py_utils/refactor/ |
| D | offset_token.py | 55 def Tokenize(f): function
|
| /external/tensorflow/tensorflow/lite/testing/nnapi_tflite_zip_tests/ |
| D | tokenize.cc | 26 void Tokenize(std::istream* input, TokenProcessor* processor) { in Tokenize() function
|
| /external/tensorflow/tensorflow/lite/testing/ |
| D | tokenize.cc | 23 void Tokenize(std::istream* input, TokenProcessor* processor) { in Tokenize() function
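Both TensorFlow Lite testing copies expose the same push-style entry point: Tokenize(std::istream*, TokenProcessor*) streams tokens into a callback object instead of returning a container. The TokenProcessor interface itself is not shown in the listing, so the ConsumeToken() method and the whitespace splitting below are assumptions sketched only to show how a push-style tokenizer is driven.

```cpp
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Hypothetical callback interface; the method name is an assumption, since
// the listing shows only the Tokenize() signature.
class TokenProcessor {
 public:
  virtual ~TokenProcessor() = default;
  virtual void ConsumeToken(std::string* token) = 0;
};

// Stand-in tokenizer: reads whitespace-separated tokens from the stream and
// pushes each one into the processor. The real implementation parses the
// test-file syntax, which is richer than this.
void Tokenize(std::istream* input, TokenProcessor* processor) {
  std::string token;
  while (*input >> token) {
    processor->ConsumeToken(&token);
  }
}

// Example processor that simply collects every token it is handed.
class CollectingProcessor : public TokenProcessor {
 public:
  void ConsumeToken(std::string* token) override { tokens_.push_back(*token); }
  const std::vector<std::string>& tokens() const { return tokens_; }

 private:
  std::vector<std::string> tokens_;
};

int main() {
  std::istringstream input("invoke { id: 1 }");
  CollectingProcessor processor;
  Tokenize(&input, &processor);
  for (const std::string& t : processor.tokens()) std::cout << t << "\n";
}
```

The push-style design lets callers decide what to do with each token (collect, count, or dispatch) without the tokenizer ever materializing the whole token list.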
|
| /external/libtextclassifier/annotator/duration/ |
| D | duration_test.cc | 108 std::vector<Token> Tokenize(const UnicodeText& text) { in Tokenize() function in libtextclassifier3::__anond123b13d0111::DurationAnnotatorTest
|
| /external/libtextclassifier/annotator/ |
| D | feature-processor.cc | 191 std::vector<Token> FeatureProcessor::Tokenize(const std::string& text) const { in Tokenize() function in libtextclassifier3::FeatureProcessor
| D | feature-processor.cc | 195 std::vector<Token> FeatureProcessor::Tokenize( in Tokenize() function in libtextclassifier3::FeatureProcessor
|
| /external/googletest/googletest/scripts/ |
| D | pump.py | 382 def Tokenize(s): function
|
| /external/google-breakpad/src/testing/gtest/scripts/ |
| D | pump.py | 382 def Tokenize(s): function
|
| /external/perfetto/src/trace_processor/ |
| D | proto_trace_parser_unittest.cc | 165 void Tokenize() { in Tokenize() function in perfetto::trace_processor::__anon83c82cc70111::ProtoTraceParserTest
|
| /external/libtextclassifier/actions/ |
| D | actions-suggestions.cc | 590 std::vector<std::vector<Token>> ActionsSuggestions::Tokenize( in Tokenize() function in libtextclassifier3::ActionsSuggestions
|