Searched refs:tokenizer_ (Results 1 – 25 of 40) sorted by relevance

/external/deqp-deps/amber/src/amberscript/
parser.cc
282 return std::to_string(tokenizer_->GetCurrentLine()) + ": " + err; in make_error()
286 tokenizer_ = MakeUnique<Tokenizer>(data); in Parse()
288 for (auto token = tokenizer_->NextToken(); !token->IsEOS(); in Parse()
289 token = tokenizer_->NextToken()) { in Parse()
447 auto token = tokenizer_->NextToken(); in ValidateEndOfStatement()
455 auto token = tokenizer_->NextToken(); in ParseShaderBlock()
466 token = tokenizer_->NextToken(); in ParseShaderBlock()
472 token = tokenizer_->NextToken(); in ParseShaderBlock()
501 token = tokenizer_->PeekNextToken(); in ParseShaderBlock()
503 tokenizer_->NextToken(); in ParseShaderBlock()
[all …]
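
The parser.cc hits above together form Amber's token pump: Parse() builds a fresh Tokenizer over the script text, drains it with NextToken() until IsEOS(), uses PeekNextToken() for one-token lookahead, and make_error() prefixes messages with GetCurrentLine(). A minimal sketch of that loop, using only the calls visible in the matches (Result and ParseOneStatement() are illustrative stand-ins, not lifted from the file):

    // Condensed from the parser.cc matches; ParseOneStatement() is
    // hypothetical, the tokenizer_ calls are the ones shown above.
    Result Parser::Parse(const std::string& data) {
      tokenizer_ = MakeUnique<Tokenizer>(data);
      for (auto token = tokenizer_->NextToken(); !token->IsEOS();
           token = tokenizer_->NextToken()) {
        Result r = ParseOneStatement(token.get());
        if (!r.IsSuccess())
          return Result(make_error(r.Error()));  // "<line>: <message>"
      }
      return {};
    }
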
/external/deqp-deps/amber/src/vkscript/
command_parser.cc
57 tokenizer_(MakeUnique<Tokenizer>(data)) { in CommandParser()
58 tokenizer_->SetCurrentLine(current_line); in CommandParser()
64 return std::to_string(tokenizer_->GetCurrentLine()) + ": " + err; in make_error()
87 for (auto token = tokenizer_->NextToken(); !token->IsEOS(); in Parse()
88 token = tokenizer_->NextToken()) { in Parse()
101 token = tokenizer_->NextToken(); in Parse()
127 token = tokenizer_->NextToken(); in Parse()
139 token = tokenizer_->NextToken(); in Parse()
150 token = tokenizer_->NextToken(); in Parse()
256 cmd->SetLine(tokenizer_->GetCurrentLine()); in ProcessDrawRect()
[all …]
/external/libtextclassifier/native/utils/grammar/parsing/
lexer_test.cc
53 tokenizer_(TokenizationType_ICU, unilib_.get(), in LexerTest()
71 Tokenizer tokenizer_; member in libtextclassifier3::grammar::__anon5574d74a0111::LexerTest
88 std::vector<Token> tokens = tokenizer_.Tokenize("This is a word"); in TEST_F()
97 std::vector<Token> tokens = tokenizer_.Tokenize("1234This a4321cde"); in TEST_F()
107 std::vector<Token> tokens = tokenizer_.Tokenize("10/18/2014"); in TEST_F()
117 std::vector<Token> tokens = tokenizer_.Tokenize("电话:0871—6857(曹"); in TEST_F()
130 std::vector<Token> tokens = tokenizer_.Tokenize("电话 :0871—6857(曹"); in TEST_F()
144 tokenizer_.Tokenize("The.qUIck\n brown2345fox88 \xE2\x80\x94 the"); in TEST_F()
158 std::vector<Token> tokens = tokenizer_.Tokenize("The+2345++the +"); in TEST_F()
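
Each of these tests drives the member Tokenizer directly and inspects the returned std::vector<Token>. The skeleton of the pattern (test name and expected count are illustrative, not taken from the suite):

    TEST_F(LexerTest, TokenizesPlainWords) {
      // One Token per whitespace-separated word under ICU tokenization.
      std::vector<Token> tokens = tokenizer_.Tokenize("This is a word");
      EXPECT_EQ(tokens.size(), 4u);
    }
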
/external/perfetto/src/trace_processor/importers/proto/
track_event_module.cc
36 tokenizer_(context, track_event_tracker_.get()), in TrackEventModule()
54 return tokenizer_.TokenizeTrackDescriptorPacket(state, decoder, in TokenizePacket()
57 tokenizer_.TokenizeTrackEventPacket(state, decoder, packet, in TokenizePacket()
62 return tokenizer_.TokenizeThreadDescriptorPacket(state, decoder); in TokenizePacket()
track_event_module.h
50 TrackEventTokenizer tokenizer_; variable
/external/protobuf/src/google/protobuf/
text_format.cc
267 tokenizer_(input_stream, &tokenizer_error_collector_), in ParserImpl()
280 tokenizer_.set_allow_f_after_float(true); in ParserImpl()
283 tokenizer_.set_comment_style(io::Tokenizer::SH_COMMENT_STYLE); in ParserImpl()
286 tokenizer_.set_require_space_after_number(false); in ParserImpl()
287 tokenizer_.set_allow_multiline_strings(true); in ParserImpl()
291 tokenizer_.Next(); in ParserImpl()
357 ReportError(tokenizer_.current().line, tokenizer_.current().column, in ReportError()
364 ReportWarning(tokenizer_.current().line, tokenizer_.current().column, in ReportWarning()
402 int start_line = tokenizer_.current().line; in ConsumeField()
403 int start_column = tokenizer_.current().column; in ConsumeField()
[all …]
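
The text_format.cc matches are where ParserImpl configures the dialect switches of protobuf's io::Tokenizer before priming the first token; current().line and current().column then feed ReportError()/ReportWarning(). Condensed, with input_stream and error_collector standing in for the ParserImpl members shown above:

    // namespace google::protobuf assumed; the arguments are an
    // io::ZeroCopyInputStream* and an io::ErrorCollector*.
    io::Tokenizer tokenizer(input_stream, error_collector);
    tokenizer.set_allow_f_after_float(true);                       // accept "1.5f"
    tokenizer.set_comment_style(io::Tokenizer::SH_COMMENT_STYLE);  // '#' comments
    tokenizer.set_require_space_after_number(false);
    tokenizer.set_allow_multiline_strings(true);
    tokenizer.Next();  // prime current() with the first token
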
/external/tflite-support/tensorflow_lite_support/cc/task/text/qa/
bert_question_answerer.cc
199 query_tokenize_results = tokenizer_->Tokenize(processed_query); in Preprocess()
215 std::vector<std::string> sub_tokens = tokenizer_->Tokenize(token).subwords; in Preprocess()
264 tokenizer_->LookupId(token, &input_ids[i]); in Preprocess()
362 ASSIGN_OR_RETURN(tokenizer_, in InitializeFromMetadata()
370 tokenizer_ = absl::make_unique<BertTokenizer>(path_to_vocab); in InitializeBertTokenizer()
375 tokenizer_ = in InitializeBertTokenizerFromBinary()
381 tokenizer_ = absl::make_unique<SentencePieceTokenizer>(path_to_spmodel); in InitializeSentencepieceTokenizer()
386 tokenizer_ = absl::make_unique<SentencePieceTokenizer>(spmodel_buffer_data, in InitializeSentencepieceTokenizerFromBinary()
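
Preprocess() here is a WordPiece-style pipeline: tokenize the query, re-tokenize each token into its .subwords, then LookupId() each subword into the model's input ids. Roughly (the loop scaffolding and query_tokens are illustrative; the tokenizer_ calls and the .subwords shape come from the hits):

    // Map every subword to a vocabulary id for the input tensor.
    std::vector<int> input_ids;
    for (const std::string& token : query_tokens) {
      for (const std::string& sub : tokenizer_->Tokenize(token).subwords) {
        int id = 0;
        tokenizer_->LookupId(sub, &id);
        input_ids.push_back(id);
      }
    }
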
/external/libtextclassifier/native/utils/grammar/
analyzer.cc
32 tokenizer_(owned_tokenizer_.get()),
41 : tokenizer_(tokenizer), in Analyzer()
83 context.tokens = tokenizer_->Tokenize(context.text); in BuildTextContextForInput()
analyzer.h
57 const Tokenizer* tokenizer_; variable
/external/perfetto/src/protozero/filtering/
message_filter.cc
109 tokenizer_ = MessageTokenizer(); in FilterMessageFragments()
135 if (stack_.size() != 1 || !tokenizer_.idle() || in FilterMessageFragments()
158 MessageTokenizer::Token token = tokenizer_.Push(octet); in FilterOneByte()
253 if (PERFETTO_UNLIKELY(!tokenizer_.idle())) { in FilterOneByte()
262 PERFETTO_DCHECK(tokenizer_.idle()); in FilterOneByte()
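
message_filter.cc decodes its input one octet at a time: FilterMessageFragments() resets the tokenizer, FilterOneByte() pushes each byte into the MessageTokenizer (which buffers until a complete field preamble or varint is available), and idle() asserts the stream ended on a message boundary. A sketch of the byte pump (HandleToken() is hypothetical, and Token::valid() is an assumption about protozero's tokenizer interface):

    void MessageFilter::FilterOneByte(uint8_t octet) {
      // A valid token means a whole field header or varint payload
      // just completed; otherwise the tokenizer keeps buffering.
      MessageTokenizer::Token token = tokenizer_.Push(octet);
      if (token.valid())
        HandleToken(token);  // hypothetical: dispatch on field id / wire type
    }
    // After the final fragment the tokenizer must not be mid-field:
    PERFETTO_DCHECK(tokenizer_.idle());
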
/external/libtextclassifier/native/actions/
feature-processor.h
61 const Tokenizer* tokenizer() const { return tokenizer_.get(); } in tokenizer()
65 const std::unique_ptr<Tokenizer> tokenizer_; variable
ngram-model.cc
85 tokenizer_ = owned_tokenizer_.get(); in NGramSensitiveModel()
87 tokenizer_ = tokenizer; in NGramSensitiveModel()
156 const std::vector<Token> raw_tokens = tokenizer_->Tokenize(text); in Eval()
grammar-actions.cc
34 tokenizer_(CreateTokenizer(grammar_rules->tokenizer_options(), unilib)), in GrammarActions()
36 analyzer_(unilib, grammar_rules->rules(), tokenizer_.get()), in GrammarActions()
grammar-actions.h
56 const std::unique_ptr<Tokenizer> tokenizer_; variable
ngram-model.h
66 const Tokenizer* tokenizer_; variable
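
The ngram-model hits show a small ownership idiom: a non-owning const Tokenizer* tokenizer_ that either borrows the caller's tokenizer or aliases an internally held owned_tokenizer_. Distilled (the constructor shape and BuildDefaultTokenizer() are illustrative):

    class NGramSensitiveModel {
     public:
      explicit NGramSensitiveModel(const Tokenizer* tokenizer) {
        if (tokenizer == nullptr) {
          owned_tokenizer_ = BuildDefaultTokenizer();  // hypothetical factory
          tokenizer_ = owned_tokenizer_.get();         // we own it
        } else {
          tokenizer_ = tokenizer;                      // borrowed, caller owns
        }
      }

     private:
      std::unique_ptr<Tokenizer> owned_tokenizer_;  // set only when owning
      const Tokenizer* tokenizer_;                  // always valid to use
    };
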
/external/perfetto/src/trace_processor/importers/ftrace/
ftrace_module_impl.cc
32 : tokenizer_(context), parser_(context) { in FtraceModuleImpl()
46 tokenizer_.TokenizeFtraceBundle(packet->slice(fld_off, ftrace_field.size), in TokenizePacket()
ftrace_module_impl.h
52 FtraceTokenizer tokenizer_;
/external/libtextclassifier/native/annotator/number/
number.h
44 tokenizer_(Tokenizer(TokenizationType_LETTER_DIGIT, unilib, in NumberAnnotator()
117 const Tokenizer tokenizer_; variable
/external/tflite-support/tensorflow_lite_support/cc/task/text/nlclassifier/
bert_nl_classifier.cc
79 input_tokenize_results = tokenizer_->Tokenize(processed_input); in Preprocess()
103 tokenizer_->LookupId(tokens[i], &input_ids[i]); in Preprocess()
184 ASSIGN_OR_RETURN(tokenizer_, in InitializeFromMetadata()
nl_classifier.cc
204 TokenizerResult result = tokenizer_->Tokenize(input); in Preprocess()
211 tokenizer_->GetUnknownToken(&unknown_token_id); in Preprocess()
214 tokenizer_->GetPadToken(&pad_token_id); in Preprocess()
219 if (tokenizer_->GetStartToken(&start_token_id)) { in Preprocess()
229 if (tokenizer_->LookupId(token, &token_id)) { in Preprocess()
458 tokenizer_ = std::unique_ptr<RegexTokenizer>( in SetupRegexTokenizer()
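
nl_classifier.cc resolves the special ids once and uses the boolean return of LookupId() as its out-of-vocabulary signal. Compressed to the essentials (loop scaffolding and the padding step are illustrative; the tokenizer_ calls mirror the hits):

    // Each Get*Token() fills its out-param; GetStartToken() returns
    // false when the vocabulary defines no start token.
    int unknown_token_id = 0, pad_token_id = 0, start_token_id = 0;
    tokenizer_->GetUnknownToken(&unknown_token_id);
    tokenizer_->GetPadToken(&pad_token_id);
    std::vector<int> ids;
    if (tokenizer_->GetStartToken(&start_token_id))
      ids.push_back(start_token_id);
    for (const std::string& token : result.subwords) {  // result from Tokenize()
      int token_id = 0;
      if (!tokenizer_->LookupId(token, &token_id))
        token_id = unknown_token_id;  // OOV fallback
      ids.push_back(token_id);
    }
    // Remaining input slots are padded with pad_token_id (elided).
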
/external/libtextclassifier/native/lang_id/
lang-id.cc
142 tokenizer_.Tokenize(text, &sentence); in FindLanguages()
212 tokenizer_.Setup(context); in Setup()
266 TokenizerForLangId tokenizer_; member in libtextclassifier3::mobile::lang_id::LangIdImpl
/external/libtextclassifier/native/utils/grammar/testing/
utils.h
73 tokenizer_(libtextclassifier3::TokenizationType_ICU, unilib_.get(), in GrammarTest()
82 context.tokens = tokenizer_.Tokenize(context.text); in TextContextForText()
234 const Tokenizer tokenizer_; variable
/external/libtextclassifier/native/annotator/
feature-processor.h
113 tokenizer_(internal::BuildTokenizer(options, unilib)) { in FeatureProcessor()
314 Tokenizer tokenizer_; variable
/external/libtextclassifier/native/utils/
tokenizer_test.cc
79 tokenizer_ = std::unique_ptr<TestingTokenizer>(new TestingTokenizer( in TestingTokenizerProxy()
86 tokenizer_->FindTokenizationRange(c); in TestFindTokenizationRole()
95 return tokenizer_->Tokenize(utf8_text); in Tokenize()
101 std::unique_ptr<TestingTokenizer> tokenizer_; member in libtextclassifier3::__anon5b5dd84c0111::TestingTokenizerProxy
/external/perfetto/src/trace_processor/
read_trace.cc
85 return tokenizer_.Tokenize( in Parse()
107 ProtoTraceTokenizer tokenizer_; member in perfetto::trace_processor::__anon350fa3820111::SerializingProtoTraceReader
