
Searched refs:tokenizer_ (Results 1 – 25 of 25) sorted by relevance

/external/deqp-deps/amber/src/amberscript/
parser.cc
132 return std::to_string(tokenizer_->GetCurrentLine()) + ": " + err; in make_error()
136 tokenizer_ = MakeUnique<Tokenizer>(data); in Parse()
138 for (auto token = tokenizer_->NextToken(); !token->IsEOS(); in Parse()
139 token = tokenizer_->NextToken()) { in Parse()
284 auto token = tokenizer_->NextToken(); in ValidateEndOfStatement()
291 auto token = tokenizer_->NextToken(); in ParseShaderBlock()
302 token = tokenizer_->NextToken(); in ParseShaderBlock()
308 token = tokenizer_->NextToken(); in ParseShaderBlock()
340 std::string data = tokenizer_->ExtractToNext("END"); in ParseShaderBlock()
346 token = tokenizer_->NextToken(); in ParseShaderBlock()
[all …]
parser.h
92 std::unique_ptr<Tokenizer> tokenizer_; variable
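
Read together, the amber hits above show the parser owning its tokenizer through a std::unique_ptr<Tokenizer> member and draining it with a NextToken() loop until an end-of-stream token, with errors prefixed by the tokenizer's current line. A minimal sketch of that shape, using hypothetical simplified Token/Tokenizer stand-ins rather than Amber's real classes:

    #include <memory>
    #include <sstream>
    #include <string>

    // Hypothetical stand-ins for Amber's Token/Tokenizer; the real classes are richer.
    struct Token {
      std::string text;
      bool IsEOS() const { return text.empty(); }
    };

    class Tokenizer {
     public:
      explicit Tokenizer(const std::string& data) : stream_(data) {}
      std::unique_ptr<Token> NextToken() {
        auto token = std::make_unique<Token>();
        stream_ >> token->text;  // empty text doubles as the EOS marker in this sketch
        return token;
      }
      int GetCurrentLine() const { return current_line_; }

     private:
      std::istringstream stream_;
      int current_line_ = 1;  // the real tokenizer tracks newlines while scanning
    };

    class Parser {
     public:
      void Parse(const std::string& data) {
        tokenizer_ = std::make_unique<Tokenizer>(data);
        // Same loop shape as parser.cc lines 138-139 above: pull tokens until EOS.
        for (auto token = tokenizer_->NextToken(); !token->IsEOS();
             token = tokenizer_->NextToken()) {
          // ...dispatch on *token...
        }
      }
      std::string MakeError(const std::string& err) const {
        // Mirrors parser.cc line 132: prefix errors with the current line number.
        return std::to_string(tokenizer_->GetCurrentLine()) + ": " + err;
      }

     private:
      std::unique_ptr<Tokenizer> tokenizer_;  // as declared at parser.h line 92
    };
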
/external/deqp-deps/amber/src/vkscript/
command_parser.cc
57 tokenizer_(MakeUnique<Tokenizer>(data)) { in CommandParser()
58 tokenizer_->SetCurrentLine(current_line); in CommandParser()
64 return std::to_string(tokenizer_->GetCurrentLine()) + ": " + err; in make_error()
87 for (auto token = tokenizer_->NextToken(); !token->IsEOS(); in Parse()
88 token = tokenizer_->NextToken()) { in Parse()
101 token = tokenizer_->NextToken(); in Parse()
127 token = tokenizer_->NextToken(); in Parse()
139 token = tokenizer_->NextToken(); in Parse()
150 token = tokenizer_->NextToken(); in Parse()
256 cmd->SetLine(tokenizer_->GetCurrentLine()); in ProcessDrawRect()
[all …]
command_parser.h
166 std::unique_ptr<Tokenizer> tokenizer_; variable
/external/perfetto/src/trace_processor/importers/proto/
track_event_module.cc
34 : tokenizer_(context), parser_(context) { in TrackEventModule()
51 return tokenizer_.TokenizeTrackDescriptorPacket(state, decoder, in TokenizePacket()
54 tokenizer_.TokenizeTrackEventPacket(state, decoder, packet, in TokenizePacket()
59 return tokenizer_.TokenizeThreadDescriptorPacket(state, decoder); in TokenizePacket()
track_event_module.h
47 TrackEventTokenizer tokenizer_;
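
The perfetto trace_processor hits show a different ownership style: the module holds its tokenizer and parser by value and forwards packets to them from TokenizePacket(). A rough sketch of that dispatch with hypothetical stand-in types and a made-up field id (the real module is keyed on TracePacket field numbers):

    #include <cstdint>

    // Hypothetical stand-ins for the perfetto types involved in these hits.
    struct TraceProcessorContext {};
    struct PacketDecoder { uint32_t field_id = 0; };
    enum class ModuleResult { kHandled, kIgnored };

    class TrackEventTokenizer {
     public:
      explicit TrackEventTokenizer(TraceProcessorContext*) {}
      ModuleResult TokenizeTrackEventPacket(const PacketDecoder&) {
        return ModuleResult::kHandled;
      }
    };

    class TrackEventParser {
     public:
      explicit TrackEventParser(TraceProcessorContext*) {}
    };

    // Same shape as track_event_module.cc line 34: both helpers are value members
    // initialized from the shared context.
    class TrackEventModule {
     public:
      explicit TrackEventModule(TraceProcessorContext* context)
          : tokenizer_(context), parser_(context) {}

      ModuleResult TokenizePacket(const PacketDecoder& decoder) {
        constexpr uint32_t kTrackEventFieldId = 11;  // hypothetical id for the sketch
        if (decoder.field_id == kTrackEventFieldId) {
          return tokenizer_.TokenizeTrackEventPacket(decoder);
        }
        return ModuleResult::kIgnored;
      }

     private:
      TrackEventTokenizer tokenizer_;  // held by value, as in track_event_module.h line 47
      TrackEventParser parser_;
    };
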
/external/protobuf/src/google/protobuf/
text_format.cc
267 tokenizer_(input_stream, &tokenizer_error_collector_), in ParserImpl()
280 tokenizer_.set_allow_f_after_float(true); in ParserImpl()
283 tokenizer_.set_comment_style(io::Tokenizer::SH_COMMENT_STYLE); in ParserImpl()
286 tokenizer_.set_require_space_after_number(false); in ParserImpl()
287 tokenizer_.set_allow_multiline_strings(true); in ParserImpl()
291 tokenizer_.Next(); in ParserImpl()
357 ReportError(tokenizer_.current().line, tokenizer_.current().column, in ReportError()
364 ReportWarning(tokenizer_.current().line, tokenizer_.current().column, in ReportWarning()
402 int start_line = tokenizer_.current().line; in ConsumeField()
403 int start_column = tokenizer_.current().column; in ConsumeField()
[all …]
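
The text_format.cc hits show protobuf's ParserImpl constructing an io::Tokenizer over the input stream and flipping several leniency switches before pulling tokens with Next(). The sketch below exercises those same calls standalone; it assumes a 3.x-era protobuf (as in this tree) where io::ErrorCollector still declares AddError(line, column, message), and the input text and error collector are placeholders:

    #include <google/protobuf/io/tokenizer.h>
    #include <google/protobuf/io/zero_copy_stream_impl_lite.h>
    #include <iostream>
    #include <string>

    using google::protobuf::io::ArrayInputStream;
    using google::protobuf::io::ErrorCollector;
    using google::protobuf::io::Tokenizer;

    // Minimal error collector; pre-v22 protobuf declares AddError with this shape.
    class StderrErrorCollector : public ErrorCollector {
     public:
      void AddError(int line, int column, const std::string& message) override {
        std::cerr << line << ":" << column << ": " << message << "\n";
      }
    };

    int main() {
      const std::string text = "foo: 1  # trailing comment\nbar: \"baz\"\n";
      ArrayInputStream input(text.data(), static_cast<int>(text.size()));
      StderrErrorCollector errors;
      Tokenizer tokenizer(&input, &errors);

      // The same switches text_format.cc flips at lines 280-287 above.
      tokenizer.set_allow_f_after_float(true);
      tokenizer.set_comment_style(Tokenizer::SH_COMMENT_STYLE);
      tokenizer.set_require_space_after_number(false);
      tokenizer.set_allow_multiline_strings(true);

      // Advance token by token, reporting position the way ReportError does
      // (tokenizer_.current().line / .column at lines 357 and 364).
      while (tokenizer.Next()) {
        std::cout << tokenizer.current().line << ":" << tokenizer.current().column
                  << " " << tokenizer.current().text << "\n";
      }
      return 0;
    }
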
/external/libtextclassifier/native/actions/
feature-processor.h
61 const Tokenizer* tokenizer() const { return tokenizer_.get(); } in tokenizer()
65 const std::unique_ptr<Tokenizer> tokenizer_; variable
ngram-model.cc
84 tokenizer_ = owned_tokenizer_.get(); in NGramModel()
86 tokenizer_ = tokenizer; in NGramModel()
151 const std::vector<Token> raw_tokens = tokenizer_->Tokenize(text); in Eval()
grammar-actions.h
52 const std::unique_ptr<Tokenizer> tokenizer_; variable
ngram-model.h
64 const Tokenizer* tokenizer_; variable
grammar-actions.cc
207 tokenizer_(CreateTokenizer(grammar_rules->tokenizer_options(), unilib)), in GrammarActions()
245 lexer_.Process(text, tokenizer_->Tokenize(text), in SuggestActions()
feature-processor.cc
71 tokenizer_(CreateTokenizer(options->tokenizer_options(), unilib)), in ActionsFeatureProcessor()
/external/perfetto/src/trace_processor/importers/ftrace/
ftrace_module_impl.cc
32 : tokenizer_(context), parser_(context) { in FtraceModuleImpl()
46 tokenizer_.TokenizeFtraceBundle(packet->slice(fld_off, ftrace_field.size)); in TokenizePacket()
ftrace_module_impl.h
52 FtraceTokenizer tokenizer_;
/external/libtextclassifier/native/annotator/number/
number.h
44 tokenizer_(Tokenizer(TokenizationType_LETTER_DIGIT, unilib, in NumberAnnotator()
117 const Tokenizer tokenizer_; variable
number.cc
224 const std::vector<Token> tokens = tokenizer_.Tokenize(context); in FindAll()
/external/libtextclassifier/native/lang_id/
lang-id.cc
142 tokenizer_.Tokenize(text, &sentence); in FindLanguages()
212 tokenizer_.Setup(context); in Setup()
266 TokenizerForLangId tokenizer_; member in libtextclassifier3::mobile::lang_id::LangIdImpl
/external/libtextclassifier/native/annotator/grammar/dates/
cfg-datetime-annotator.cc
63 tokenizer_(BuildTokenizer(unilib, tokenizer_options)), in CfgDatetimeAnnotator()
112 parser_.Parse(input.data(), tokenizer_.Tokenize(input), locales, in Parse()
cfg-datetime-annotator.h
67 const Tokenizer tokenizer_; variable
/external/libtextclassifier/native/annotator/grammar/
grammar-annotator.cc
359 tokenizer_(BuildTokenizer(unilib, model->tokenizer_options())), in GrammarAnnotator()
385 lexer_.Process(text, tokenizer_.Tokenize(text), /*annotations=*/nullptr, in Annotate()
416 lexer_.Process(text, tokenizer_.Tokenize(text), /*annotations=*/nullptr, in SuggestSelection()
449 const std::vector<Token> tokens = tokenizer_.Tokenize(text); in ClassifyText()
grammar-annotator.h
65 const Tokenizer tokenizer_; variable
/external/libtextclassifier/native/utils/
tokenizer_test.cc
79 tokenizer_ = std::unique_ptr<TestingTokenizer>(new TestingTokenizer( in TestingTokenizerProxy()
86 tokenizer_->FindTokenizationRange(c); in TestFindTokenizationRole()
95 return tokenizer_->Tokenize(utf8_text); in Tokenize()
101 std::unique_ptr<TestingTokenizer> tokenizer_; member in libtextclassifier3::__anonf2ccfc700111::TestingTokenizerProxy
/external/libtextclassifier/native/annotator/
feature-processor.h
99 tokenizer_(internal::BuildTokenizer(options, unilib)) { in FeatureProcessor()
322 Tokenizer tokenizer_; variable
feature-processor.cc
193 return tokenizer_.Tokenize(text); in Tokenize()
198 return tokenizer_.Tokenize(text_unicode); in Tokenize()
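
In the libtextclassifier annotator code, by contrast, tokenizer_ is usually a plain value member (const Tokenizer tokenizer_) built once from model options, and callers go through a single tokenizer_.Tokenize(text) call that returns std::vector<Token>. A sketch of that by-value pattern with hypothetical stand-in types (the real Tokenizer in utils/tokenizer.h is built by CreateTokenizer/BuildTokenizer from flatbuffer options and a UniLib):

    #include <string>
    #include <utility>
    #include <vector>

    // Hypothetical stand-ins; real types live in utils/tokenizer.h and utils/token.h.
    struct Token {
      std::string value;
      int start = 0;  // codepoint offsets in the real implementation
      int end = 0;
    };

    class Tokenizer {
     public:
      // Stand-in tokenization: split on ASCII spaces, recording token boundaries.
      std::vector<Token> Tokenize(const std::string& text) const {
        std::vector<Token> tokens;
        int begin = -1;
        for (int i = 0; i <= static_cast<int>(text.size()); ++i) {
          const bool is_space =
              i == static_cast<int>(text.size()) || text[i] == ' ';
          if (!is_space && begin < 0) begin = i;
          if (is_space && begin >= 0) {
            tokens.push_back({text.substr(begin, i - begin), begin, i});
            begin = -1;
          }
        }
        return tokens;
      }
    };

    // Mirrors the by-value member seen in feature-processor.h and number.h above.
    class FeatureProcessor {
     public:
      explicit FeatureProcessor(Tokenizer tokenizer)
          : tokenizer_(std::move(tokenizer)) {}
      std::vector<Token> Tokenize(const std::string& text) const {
        return tokenizer_.Tokenize(text);  // same delegation as feature-processor.cc line 193
      }

     private:
      const Tokenizer tokenizer_;
    };
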