
Searched defs:Tokenize (Results 1 – 15 of 15) sorted by relevance

/external/google-breakpad/src/processor/
tokenize.cc:45  bool Tokenize(char *line, in Tokenize() function
/external/perfetto/src/trace_processor/importers/systrace/
systrace_line_tokenizer.cc:49  util::Status SystraceLineTokenizer::Tokenize(const std::string& buffer, in Tokenize() function in perfetto::trace_processor::SystraceLineTokenizer
/external/chromium-trace/catapult/common/py_utils/py_utils/refactor/
offset_token.py:60  def Tokenize(f): function
/external/libtextclassifier/native/lang_id/
custom-tokenizer.cc:102  void TokenizerForLangId::Tokenize(StringPiece text, in Tokenize() function in libtextclassifier3::mobile::lang_id::TokenizerForLangId
/external/tensorflow/tensorflow/lite/testing/nnapi_tflite_zip_tests/
tokenize.cc:26  void Tokenize(std::istream* input, TokenProcessor* processor) { in Tokenize() function
/external/tensorflow/tensorflow/lite/testing/
tokenize.cc:25  void Tokenize(std::istream* input, TokenProcessor* processor) { in Tokenize() function
/external/libtextclassifier/native/utils/
tokenizer.cc:93  std::vector<Token> Tokenizer::Tokenize(const std::string& text) const { in Tokenize() function in libtextclassifier3::Tokenizer
tokenizer.cc:98  std::vector<Token> Tokenizer::Tokenize(const UnicodeText& text_unicode) const { in Tokenize() function in libtextclassifier3::Tokenizer
tokenizer_test.cc:94  std::vector<Token> Tokenize(const std::string& utf8_text) const { in Tokenize() function in libtextclassifier3::__anonf2ccfc700111::TestingTokenizerProxy
/external/libtextclassifier/native/annotator/duration/
duration_test.cc:112  std::vector<Token> Tokenize(const UnicodeText& text) { in Tokenize() function in libtextclassifier3::__anon8aa146150111::DurationAnnotatorTest
duration_test.cc:493  std::vector<Token> Tokenize(const UnicodeText& text) { in Tokenize() function in libtextclassifier3::__anon8aa146150111::JapaneseDurationAnnotatorTest
/external/perfetto/src/traced/probes/ftrace/kallsyms/
kernel_symbol_map.cc:143  void Tokenize(const char* name, Lambda fn) { in Tokenize() function
/external/libtextclassifier/native/annotator/
feature-processor.cc:192  std::vector<Token> FeatureProcessor::Tokenize(const std::string& text) const { in Tokenize() function in libtextclassifier3::FeatureProcessor
feature-processor.cc:196  std::vector<Token> FeatureProcessor::Tokenize( in Tokenize() function in libtextclassifier3::FeatureProcessor
/external/googletest/googlemock/scripts/
pump.py:383  def Tokenize(s): function
/external/google-breakpad/src/testing/gtest/scripts/
pump.py:382  def Tokenize(s): function
/external/libtextclassifier/native/actions/
actions-suggestions.cc:446  std::vector<std::vector<Token>> ActionsSuggestions::Tokenize( in Tokenize() function in libtextclassifier3::ActionsSuggestions
/external/perfetto/src/trace_processor/importers/proto/
proto_trace_parser_unittest.cc:235  util::Status Tokenize() { in Tokenize() function in perfetto::trace_processor::__anon1e8400a00111::ProtoTraceParserTest
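
Note: most of the C++ hits above share the same basic shape, a Tokenize routine that walks its input and emits tokens either into a returned vector or through a callback. The sketch below is purely illustrative and is not taken from any of the listed files; the Token struct and the whitespace-splitting rule are assumptions made only to show the std::vector<Token> Tokenize(const std::string&) shape that several of the results use.

    // Illustrative sketch only -- not the implementation behind any search hit above.
    // Token and the whitespace-splitting rule are assumptions for this example.
    #include <cctype>
    #include <string>
    #include <vector>

    struct Token {
      std::string value;  // token text
      size_t start;       // byte offset of the first character
      size_t end;         // byte offset one past the last character
    };

    // Splits `text` on ASCII whitespace and records each token with its offsets.
    std::vector<Token> Tokenize(const std::string& text) {
      std::vector<Token> tokens;
      size_t i = 0;
      while (i < text.size()) {
        // Skip a run of whitespace.
        while (i < text.size() && std::isspace(static_cast<unsigned char>(text[i]))) ++i;
        size_t start = i;
        // Consume the next run of non-whitespace characters.
        while (i < text.size() && !std::isspace(static_cast<unsigned char>(text[i]))) ++i;
        if (i > start) tokens.push_back({text.substr(start, i - start), start, i});
      }
      return tokens;
    }

The other recurring shape in the results is the streaming form, e.g. void Tokenize(std::istream* input, TokenProcessor* processor) in the TensorFlow Lite testing entries, which pushes tokens to a processor object instead of returning a vector.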