/third_party/protobuf/csharp/src/Google.Protobuf/ |
D | JsonParser.cs |
     97  …private static void MergeWrapperField(JsonParser parser, IMessage message, JsonTokenizer tokenizer) in MergeWrapperField()
    151  private void Merge(IMessage message, JsonTokenizer tokenizer) in Merge()
    220  private void MergeField(IMessage message, FieldDescriptor field, JsonTokenizer tokenizer) in MergeField()
    253  … private void MergeRepeatedField(IMessage message, FieldDescriptor field, JsonTokenizer tokenizer) in MergeRepeatedField()
    279  private void MergeMapField(IMessage message, FieldDescriptor field, JsonTokenizer tokenizer) in MergeMapField()
    326  private object ParseSingleValue(FieldDescriptor field, JsonTokenizer tokenizer) in ParseSingleValue()
    445  private void MergeStructValue(IMessage message, JsonTokenizer tokenizer) in MergeStructValue()
    487  private void MergeStruct(IMessage message, JsonTokenizer tokenizer) in MergeStruct()
    500  private void MergeAny(IMessage message, JsonTokenizer tokenizer) in MergeAny()
    565  private void MergeWellKnownTypeAnyBody(IMessage body, JsonTokenizer tokenizer) in MergeWellKnownTypeAnyBody()
|
/third_party/boost/libs/graph/src/ |
D | read_graphviz_new.cpp |
    196  struct tokenizer  struct
    198  std::string::const_iterator begin, end;
    199  std::vector< token > lookahead;
    201  boost::regex stuff_to_skip;
    202  boost::regex basic_id_token;
    203  boost::regex punctuation_token;
    204  boost::regex number_token;
    205  boost::regex quoted_string_token;
    206  boost::regex xml_tag_token;
    207  boost::regex cdata;
    [all …]
|
/third_party/boost/libs/coroutine/example/asymmetric/ |
D | chaining.cpp |
    143  coro_t::pull_type tokenizer(boost::bind(tokenize, _1, boost::ref(reader))); in main() local
    155  coro_t::pull_type tokenizer(boost::bind(tokenize, _1, boost::ref(reader))); in main() local
    168  coro_t::pull_type tokenizer(boost::bind(tokenize, _1, boost::ref(reader))); in main() local
    181  coro_t::pull_type tokenizer(boost::bind(tokenize, _1, boost::ref(reader))); in main() local
    192  coro_t::pull_type tokenizer(boost::bind(tokenize, _1, boost::ref(reader))); in main() local
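All five hits are local pipeline stages in Boost.Coroutine's chaining example, which feeds one pull-type coroutine into the next via boost::bind. Below is a minimal sketch of that pattern; reader_fn, the sample input line, and the comma splitting are made up here for illustration, and the program links against boost_coroutine and boost_context:

    #include <boost/coroutine/asymmetric_coroutine.hpp>
    #include <boost/bind.hpp>
    #include <iostream>
    #include <sstream>
    #include <string>

    typedef boost::coroutines::asymmetric_coroutine<std::string> coro_t;

    // Stage 1 (hypothetical): yield whitespace-separated words from a fixed line.
    void reader_fn(coro_t::push_type& yield) {
        std::istringstream in("alpha beta,gamma");
        std::string word;
        while (in >> word)
            yield(word);
    }

    // Stage 2 (hypothetical): split each incoming word on ',' and yield the pieces.
    void tokenize(coro_t::push_type& yield, coro_t::pull_type& source) {
        for (const std::string& word : source) {
            std::string::size_type start = 0, pos;
            while ((pos = word.find(',', start)) != std::string::npos) {
                yield(word.substr(start, pos - start));
                start = pos + 1;
            }
            yield(word.substr(start));
        }
    }

    int main() {
        coro_t::pull_type reader(reader_fn);
        // Same shape as the locals listed above: bind the upstream stage in.
        coro_t::pull_type tokenizer(boost::bind(tokenize, _1, boost::ref(reader)));
        for (const std::string& token : tokenizer)
            std::cout << token << '\n';   // prints alpha, beta, gamma
        return 0;
    }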
|
/third_party/boost/boost/ |
D | tokenizer.hpp |
     32  class tokenizer {  class
     53  tokenizer(Iterator first, Iterator last,  function in boost::tokenizer
     58  tokenizer(const Container& c)  function in boost::tokenizer
     62  tokenizer(const Container& c,const TokenizerFunc& f)  function in boost::tokenizer
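The constructors above show the two usual ways to build a boost::tokenizer: from a container alone, or from a container plus a TokenizerFunc. A minimal sketch of the second form using char_separator (the input string and delimiter set are illustrative):

    #include <boost/tokenizer.hpp>
    #include <iostream>
    #include <string>

    int main() {
        const std::string text = "a,b;c";                        // illustrative input
        boost::char_separator<char> sep(",;");                   // the TokenizerFunc
        typedef boost::tokenizer<boost::char_separator<char> > tok_t;
        tok_t tok(text, sep);            // tokenizer(const Container&, const TokenizerFunc&)
        for (tok_t::iterator it = tok.begin(); it != tok.end(); ++it)
            std::cout << *it << '\n';                            // a, b, c on separate lines
        return 0;
    }

The single-argument constructor behaves the same way but uses a default-constructed TokenizerFunc.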
|
/third_party/protobuf/python/google/protobuf/ |
D | text_format.py |
     851  def _MergeField(self, tokenizer, message):  argument
     985  def _ConsumeAnyTypeUrl(self, tokenizer):  argument
    1000  def _MergeMessageField(self, tokenizer, message, field):  argument
    1064  def _MergeScalarField(self, tokenizer, message, field):  argument
    1140  def _SkipFieldContents(tokenizer):  argument
    1159  def _SkipField(tokenizer):  argument
    1182  def _SkipFieldMessage(tokenizer):  argument
    1201  def _SkipFieldValue(tokenizer):  argument
    1570  def _ConsumeInt32(tokenizer):  argument
    1585  def _ConsumeUint32(tokenizer):  argument
    [all …]
|
/third_party/mindspore/tests/st/fl/albert/src/ |
D | dataset.py |
     36  def convert_examples_to_features(examples, label_list, max_seq_length, tokenizer, cyclic_trunc=Fals…  argument
     78  def load_dataset(data_path, max_seq_length, tokenizer, batch_size, label_list=None, do_shuffle=True,  argument
    100  def load_datasets(data_dir, max_seq_length, tokenizer, batch_size, label_list=None, do_shuffle=True,  argument
    126  def create_ms_dataset(data_list, label_list, max_seq_length, tokenizer, batch_size, do_shuffle=True,  argument
|
/third_party/protobuf/src/google/protobuf/io/ |
D | tokenizer_unittest.cc |
    253  Tokenizer tokenizer(&input, &error_collector); in TEST_2D() local
    297  Tokenizer tokenizer(&input, &error_collector); in TEST_1D() local
    436  Tokenizer tokenizer(&input, &error_collector); in TEST_2D() local
    501  Tokenizer tokenizer(&input, &error_collector); in TEST_1D() local
    661  Tokenizer tokenizer(&input, &error_collector); in TEST_2D() local
    920  Tokenizer tokenizer(&input, &error_collector); in TEST_2D() local
    946  Tokenizer tokenizer(&input, &error_collector); in TEST_1D() local
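The unit-test locals above all follow the same construction: a ZeroCopyInputStream plus an ErrorCollector. A minimal sketch of that setup outside the test harness; the input text is made up, and the AddError override assumes an older protobuf API (recent releases renamed it to RecordError):

    #include <google/protobuf/io/tokenizer.h>
    #include <google/protobuf/io/zero_copy_stream_impl_lite.h>
    #include <iostream>
    #include <string>

    using google::protobuf::io::ArrayInputStream;
    using google::protobuf::io::ColumnNumber;
    using google::protobuf::io::ErrorCollector;
    using google::protobuf::io::Tokenizer;

    // Report tokenization errors on stderr instead of collecting them.
    class StderrErrorCollector : public ErrorCollector {
     public:
      void AddError(int line, ColumnNumber column, const std::string& message) override {
        std::cerr << "error at " << line << ":" << column << ": " << message << "\n";
      }
    };

    int main() {
      const std::string text = "foo = 42;";                 // illustrative input
      ArrayInputStream input(text.data(), static_cast<int>(text.size()));
      StderrErrorCollector error_collector;
      Tokenizer tokenizer(&input, &error_collector);        // same shape as the test locals
      while (tokenizer.Next())                              // walk identifier/number/symbol tokens
        std::cout << tokenizer.current().text << "\n";
      return 0;
    }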
|
/third_party/vk-gl-cts/framework/opengl/ |
D | gluVarTypeUtil.cpp |
     92  VarTokenizer tokenizer(nameWithPath); in parseVariableName() local
     99  VarTokenizer tokenizer(nameWithPath); in parseTypePath() local
|
/third_party/boost/libs/tokenizer/example/ |
D | char_sep_example_2.cpp | 21 tokenizer; in main() typedef
|
D | char_sep_example_1.cpp | 20 tokenizer; in main() typedef
|
/third_party/parse5/packages/parse5-sax-parser/test/ |
D | parser-feedback-simulator.test.js | 11 const tokenizer = new Tokenizer(); constant
|
/third_party/node/tools/inspector_protocol/encoding/ |
D | encoding_test.cc |
    173  CBORTokenizer tokenizer(SpanFrom(encoded)); in TEST() local
    191  CBORTokenizer tokenizer(SpanFrom(encoded)); in TEST() local
    211  CBORTokenizer tokenizer(SpanFrom(encoded)); in TEST() local
    230  CBORTokenizer tokenizer(SpanFrom(encoded)); in TEST() local
    250  CBORTokenizer tokenizer(SpanFrom(encoded)); in TEST() local
    277  CBORTokenizer tokenizer(SpanFrom(encoded)); in TEST() local
    305  CBORTokenizer tokenizer(SpanFrom(test.data)); in TEST() local
    324  CBORTokenizer tokenizer(SpanFrom(encoded)); in TEST() local
    346  CBORTokenizer tokenizer(SpanFrom(encoded)); in TEST() local
    368  CBORTokenizer tokenizer(SpanFrom(encoded)); in TEST() local
    [all …]
|
/third_party/parse5/packages/parse5/lib/extensions/error-reporting/ |
D | tokenizer-mixin.js | 8 constructor(tokenizer, opts) { argument
|
/third_party/mindspore/tests/st/networks/models/bert/src/ |
D | sample_process.py | 21 def process_one_example_p(tokenizer, text, max_seq_len=128): argument
|
/third_party/freetype/docs/reference/assets/javascripts/lunr/ |
D | lunr.th.js | 17 …function(){this.pipeline.reset(),this.pipeline.add(t.th.trimmer),i?this.tokenizer=t.th.tokenizer:(… function
|
D | lunr.ja.js | 17 …this.pipeline.add(m.ja.trimmer,m.ja.stopWordFilter,m.ja.stemmer),l?this.tokenizer=m.ja.tokenizer:(… function
|
/third_party/parse5/packages/parse5/test/ |
D | tokenizer.test.js | 14 const tokenizer = new Tokenizer(); constant
|
/third_party/parse5/packages/parse5-sax-parser/lib/ |
D | parser-feedback-simulator.js | 15 constructor(tokenizer) { argument
|
/third_party/python/Tools/scripts/ |
D | abitype.py | 161 tokenizer = re.compile( variable
|
/third_party/libphonenumber/java/internal/prefixmapper/src/com/google/i18n/phonenumbers/prefixmapper/ |
D | PrefixTimeZonesMap.java | 110 StringTokenizer tokenizer = new StringTokenizer(timezonesString, in tokenizeRawOutputString() local
|
/third_party/boost/libs/flyweight/example/ |
D | serialization.cpp | 44 > tokenizer; in save_serialization_file() typedef
|
/third_party/boost/boost/spirit/home/lex/lexer/lexertl/ |
D | functor_data.hpp |
    155  typedef basic_iterator_tokeniser<Iterator> tokenizer; in next() typedef
    255  typedef basic_iterator_tokeniser<Iterator> tokenizer; in next() typedef
    348  typedef basic_iterator_tokeniser<Iterator> tokenizer; in lookahead() typedef
    487  typedef basic_iterator_tokeniser<Iterator> tokenizer; in lookahead() typedef
|
/third_party/icu/icu4j/main/tests/core/src/com/ibm/icu/dev/test/util/ |
D | StringTokenizerTest.java |
    158  StringTokenizer tokenizer = new StringTokenizer(str, delimiter); in TestSupplementary() local
    240  StringTokenizer tokenizer = new StringTokenizer(str, delimiter); in TestNextNonDelimiterToken() local
    342  StringTokenizer tokenizer = new StringTokenizer(str, delimiter, true, true); in TestNextDelimiterToken() local
|
/third_party/icu/ohos_icu4j/src/main/tests/ohos/global/icu/dev/test/util/ |
D | StringTokenizerTest.java |
    161  StringTokenizer tokenizer = new StringTokenizer(str, delimiter); in TestSupplementary() local
    243  StringTokenizer tokenizer = new StringTokenizer(str, delimiter); in TestNextNonDelimiterToken() local
    345  StringTokenizer tokenizer = new StringTokenizer(str, delimiter, true, true); in TestNextDelimiterToken() local
|
/third_party/boost/boost/date_time/ |
D | time_parsing.hpp |
     61  std::basic_string<char_type> > tokenizer; in str_from_delimited_time_duration() typedef
    224  std::basic_string<char> > tokenizer; in parse_undelimited_time_duration() typedef
|