/third_party/protobuf/csharp/src/Google.Protobuf.Test/ |
D | JsonTokenizerTest.cs | in ObjectDepth():
     88  var tokenizer = JsonTokenizer.FromTextReader(new StringReader(json));
     90  Assert.AreEqual(0, tokenizer.ObjectDepth);
     91  Assert.AreEqual(JsonToken.StartObject, tokenizer.Next());
     92  Assert.AreEqual(1, tokenizer.ObjectDepth);
     93  Assert.AreEqual(JsonToken.Name("foo"), tokenizer.Next());
     94  Assert.AreEqual(1, tokenizer.ObjectDepth);
     95  Assert.AreEqual(JsonToken.StartObject, tokenizer.Next());
     96  Assert.AreEqual(2, tokenizer.ObjectDepth);
     97  Assert.AreEqual(JsonToken.Name("x"), tokenizer.Next());
     98  Assert.AreEqual(2, tokenizer.ObjectDepth);
    [all …]
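A minimal Python sketch of the invariant this test checks: the reported object depth grows only after a StartObject token is consumed and stays flat across property names. The toy DepthTrackingTokenizer below is illustrative, not the C# JsonTokenizer API:

    import re

    class DepthTrackingTokenizer:
        """Toy JSON tokenizer recognising only {, }, and quoted names."""
        def __init__(self, text):
            self._tokens = re.findall(r'[{}]|"[^"]*"', text)
            self.object_depth = 0

        def next(self):
            token = self._tokens.pop(0)
            if token == "{":
                self.object_depth += 1  # grows once StartObject is consumed
            elif token == "}":
                self.object_depth -= 1  # shrinks once EndObject is consumed
            return token

    tok = DepthTrackingTokenizer('{ "foo": { "x": {} } }')
    assert tok.object_depth == 0
    assert tok.next() == "{" and tok.object_depth == 1
    assert tok.next() == '"foo"' and tok.object_depth == 1
    assert tok.next() == "{" and tok.object_depth == 2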
|
/third_party/protobuf/csharp/compatibility_tests/v3.0.0/src/Google.Protobuf.Test/ |
D | JsonTokenizerTest.cs | in ObjectDepth():
     88  var tokenizer = JsonTokenizer.FromTextReader(new StringReader(json));
     90  Assert.AreEqual(0, tokenizer.ObjectDepth);
     91  Assert.AreEqual(JsonToken.StartObject, tokenizer.Next());
     92  Assert.AreEqual(1, tokenizer.ObjectDepth);
     93  Assert.AreEqual(JsonToken.Name("foo"), tokenizer.Next());
     94  Assert.AreEqual(1, tokenizer.ObjectDepth);
     95  Assert.AreEqual(JsonToken.StartObject, tokenizer.Next());
     96  Assert.AreEqual(2, tokenizer.ObjectDepth);
     97  Assert.AreEqual(JsonToken.Name("x"), tokenizer.Next());
     98  Assert.AreEqual(2, tokenizer.ObjectDepth);
    [all …]
|
/third_party/protobuf/src/google/protobuf/io/ |
D | tokenizer_unittest.cc | in TEST_2D():
    253  Tokenizer tokenizer(&input, &error_collector);
    256  EXPECT_EQ(Tokenizer::TYPE_START, tokenizer.current().type);
    257  EXPECT_EQ("", tokenizer.current().text);
    258  EXPECT_EQ(0, tokenizer.current().line);
    259  EXPECT_EQ(0, tokenizer.current().column);
    260  EXPECT_EQ(0, tokenizer.current().end_column);
    263  ASSERT_TRUE(tokenizer.Next());
    266  EXPECT_EQ(kSimpleTokenCases_case.type, tokenizer.current().type);
    268  EXPECT_EQ(kSimpleTokenCases_case.input, tokenizer.current().text);
    270  EXPECT_EQ(0, tokenizer.current().line);
    [all …]
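The test drives protobuf's io::Tokenizer and checks each token's type, text, and zero-based line/column. A rough Python analogue of the same position checks, using the stdlib tokenize module (which counts lines from 1) rather than protobuf's tokenizer:

    import io
    import tokenize

    # Tokenize a one-line input and inspect the first token's position.
    tokens = list(tokenize.generate_tokens(io.StringIO("foo = 42\n").readline))
    first = tokens[0]
    assert first.type == tokenize.NAME
    assert first.string == "foo"
    assert first.start == (1, 0)  # (line, column); stdlib lines start at 1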
|
/third_party/protobuf/python/google/protobuf/ |
D | text_format.py |
    847  tokenizer = Tokenizer(str_lines)
    848  while not tokenizer.AtEnd():
    849    self._MergeField(tokenizer, message)
    851  def _MergeField(self, tokenizer, message):
    863      tokenizer.TryConsume('[')):
    864    type_url_prefix, packed_type_name = self._ConsumeAnyTypeUrl(tokenizer)
    865    tokenizer.Consume(']')
    866    tokenizer.TryConsume(':')
    867    if tokenizer.TryConsume('<'):
    870      tokenizer.Consume('{')
    [all …]
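The bracketed branch is text format's expanded Any syntax: a [type.googleapis.com/...] type URL followed by the packed message body. A short usage sketch against the public protobuf Python API, assuming the protobuf package is installed:

    from google.protobuf import any_pb2, duration_pb2, text_format

    # Parse the expanded Any form directly into an Any message; importing
    # duration_pb2 makes Duration resolvable in the default descriptor pool.
    any_msg = any_pb2.Any()
    text_format.Parse(
        '[type.googleapis.com/google.protobuf.Duration] { seconds: 3 }',
        any_msg)

    unpacked = duration_pb2.Duration()
    assert any_msg.Unpack(unpacked)
    assert unpacked.seconds == 3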
|
/third_party/protobuf/csharp/src/Google.Protobuf/ |
D | JsonParser.cs |
     76  …{ Timestamp.Descriptor.FullName, (parser, message, tokenizer) => MergeTimestamp(message, tokenizer…
     77  …{ Duration.Descriptor.FullName, (parser, message, tokenizer) => MergeDuration(message, tokenizer.N…
     78  …{ Value.Descriptor.FullName, (parser, message, tokenizer) => parser.MergeStructValue(message, toke…
     79  { ListValue.Descriptor.FullName, (parser, message, tokenizer) =>
     80  …r.MergeRepeatedField(message, message.Descriptor.Fields[ListValue.ValuesFieldNumber], tokenizer) },
     81  …{ Struct.Descriptor.FullName, (parser, message, tokenizer) => parser.MergeStruct(message, tokenize…
     82  { Any.Descriptor.FullName, (parser, message, tokenizer) => parser.MergeAny(message, tokenizer) },
     83  …{ FieldMask.Descriptor.FullName, (parser, message, tokenizer) => MergeFieldMask(message, tokenizer…
     97  private static void MergeWrapperField(JsonParser parser, IMessage message, JsonTokenizer tokenizer)
     99  …geField(message, message.Descriptor.Fields[WrappersReflection.WrapperValueFieldNumber], tokenizer);
    [all …]
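Lines 76-83 build a dispatch table from a well-known type's full name to a merge delegate, so types such as Timestamp and Duration get their special JSON forms. The same shape in Python, with all names illustrative rather than the C# API:

    # Dispatch table: full type name -> merge function (names hypothetical).
    def merge_timestamp(message, tokenizer): ...
    def merge_duration(message, tokenizer): ...

    WELL_KNOWN_HANDLERS = {
        "google.protobuf.Timestamp": merge_timestamp,
        "google.protobuf.Duration": merge_duration,
    }

    def merge_message(message, full_name, tokenizer):
        handler = WELL_KNOWN_HANDLERS.get(full_name)
        if handler is not None:
            handler(message, tokenizer)  # well-known type: custom JSON form
        else:
            ...  # ordinary message: merge field by field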
|
/third_party/protobuf/python/google/protobuf/internal/ |
D | text_format_test.py |
    1892  tokenizer = text_format.Tokenizer(text.splitlines())
    1893  methods = [(tokenizer.ConsumeIdentifier, 'identifier1'), ':',
    1894             (tokenizer.ConsumeString, 'string1'),
    1895             (tokenizer.ConsumeIdentifier, 'identifier2'), ':',
    1896             (tokenizer.ConsumeInteger, 123),
    1897             (tokenizer.ConsumeIdentifier, 'identifier3'), ':',
    1898             (tokenizer.ConsumeString, 'string'),
    1899             (tokenizer.ConsumeIdentifier, 'identifiER_4'), ':',
    1900             (tokenizer.ConsumeFloat, 1.1e+2),
    1901             (tokenizer.ConsumeIdentifier, 'ID5'), ':',
    [all …]
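The test pairs each Consume* method with the value it should return. A condensed sketch of the same calls; note that Tokenizer and the Consume* methods are protobuf-internal APIs, used here only as the test itself uses them:

    from google.protobuf import text_format

    # Consume tokens one by one from a text-format line.
    tokenizer = text_format.Tokenizer('identifier1 : "string1"'.splitlines())
    assert tokenizer.ConsumeIdentifier() == 'identifier1'
    tokenizer.Consume(':')
    assert tokenizer.ConsumeString() == 'string1'
    assert tokenizer.AtEnd()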
|
/third_party/vk-gl-cts/framework/opengl/ |
D | gluVarTypeUtil.cpp | in parseVariableName() and parseTypePath():
     92  VarTokenizer tokenizer(nameWithPath);
     93  TCU_CHECK(tokenizer.getToken() == VarTokenizer::TOKEN_IDENTIFIER);
     94  return tokenizer.getIdentifier();
     99  VarTokenizer tokenizer(nameWithPath);
    101  if (tokenizer.getToken() == VarTokenizer::TOKEN_IDENTIFIER)
    102    tokenizer.advance();
    105  while (tokenizer.getToken() != VarTokenizer::TOKEN_END)
    109    if (tokenizer.getToken() == VarTokenizer::TOKEN_PERIOD)
    111      tokenizer.advance();
    112      TCU_CHECK(tokenizer.getToken() == VarTokenizer::TOKEN_IDENTIFIER);
    [all …]
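parseTypePath() walks a shader variable path such as "block.member[2]", expecting an identifier after each period. A toy regex scanner over the same grammar (illustrative; not the dEQP VarTokenizer):

    import re

    # Tokens: identifier, '.', or '[index]'.
    TOKEN_RE = re.compile(r'(?P<ident>[A-Za-z_]\w*)|\.|\[(?P<index>\d+)\]')

    def parse_type_path(name_with_path):
        path = []
        for match in TOKEN_RE.finditer(name_with_path):
            if match.lastgroup == 'ident':
                path.append(('member', match.group('ident')))
            elif match.lastgroup == 'index':
                path.append(('element', int(match.group('index'))))
        return path

    assert parse_type_path('block.member[2]') == [
        ('member', 'block'), ('member', 'member'), ('element', 2)]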
|
/third_party/protobuf/java/core/src/main/java/com/google/protobuf/ |
D | TextFormat.java | in merge() and mergeField():
    1753  final Tokenizer tokenizer = new Tokenizer(input);
    1758  while (!tokenizer.atEnd()) {
    1759    mergeField(tokenizer, extensionRegistry, target, unknownFields);
    1768  final Tokenizer tokenizer,
    1774  tokenizer,
    1783  final Tokenizer tokenizer,
    1790  int startLine = tokenizer.getLine();
    1791  int startColumn = tokenizer.getColumn();
    1795  if ("google.protobuf.Any".equals(type.getFullName()) && tokenizer.tryConsume("[")) {
    1796    mergeAnyFieldValue(tokenizer, extensionRegistry, target, parseTreeBuilder, unknownFields,
    [all …]
|
/third_party/icu/ohos_icu4j/src/main/tests/ohos/global/icu/dev/test/util/ |
D | StringTokenizerTest.java | in TestSupplementary():
    161  StringTokenizer tokenizer = new StringTokenizer(str, delimiter);
    162  if (!tokenizer.nextElement().equals(expected[0])) {
    165  if (tokenizer.hasMoreElements()) {
    171  tokenizer = new StringTokenizer(str, delimiter);
    173  while (tokenizer.hasMoreElements()) {
    174    if (!tokenizer.nextElement().equals(expected1[i ++])) {
    178  if (tokenizer.hasMoreElements()) {
    185  tokenizer = new StringTokenizer(str, delimiter);
    186  if (!tokenizer.nextElement().equals(expected2[0])) {
    189  if (tokenizer.hasMoreElements()) {
    [all …]
|
/third_party/icu/icu4j/main/tests/core/src/com/ibm/icu/dev/test/util/ |
D | StringTokenizerTest.java | in TestSupplementary():
    158  StringTokenizer tokenizer = new StringTokenizer(str, delimiter);
    159  if (!tokenizer.nextElement().equals(expected[0])) {
    162  if (tokenizer.hasMoreElements()) {
    168  tokenizer = new StringTokenizer(str, delimiter);
    170  while (tokenizer.hasMoreElements()) {
    171    if (!tokenizer.nextElement().equals(expected1[i ++])) {
    175  if (tokenizer.hasMoreElements()) {
    182  tokenizer = new StringTokenizer(str, delimiter);
    183  if (!tokenizer.nextElement().equals(expected2[0])) {
    186  if (tokenizer.hasMoreElements()) {
    [all …]
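Both copies (the OpenHarmony mirror above and the upstream ICU4J one) test that StringTokenizer treats supplementary characters, those outside the Basic Multilingual Plane, as single delimiters. Python strings index by code point, so the same property is easy to demonstrate:

    # U+10000 is one code point in a Python str, not a surrogate pair, so it
    # splits cleanly as a single delimiter, which is what the ICU tests expect.
    delimiter = '\U00010000'
    text = 'abc' + delimiter + 'def'
    assert len(delimiter) == 1
    assert text.split(delimiter) == ['abc', 'def']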
|
/third_party/node/tools/inspector_protocol/encoding/ |
D | encoding_test.cc | in TEST():
    173  CBORTokenizer tokenizer(SpanFrom(encoded));
    174  EXPECT_EQ(CBORTokenTag::INT32, tokenizer.TokenTag());
    175  EXPECT_EQ(23, tokenizer.GetInt32());
    176  tokenizer.Next();
    177  EXPECT_EQ(CBORTokenTag::DONE, tokenizer.TokenTag());
    191  CBORTokenizer tokenizer(SpanFrom(encoded));
    192  EXPECT_EQ(CBORTokenTag::INT32, tokenizer.TokenTag());
    193  EXPECT_EQ(42, tokenizer.GetInt32());
    194  tokenizer.Next();
    195  EXPECT_EQ(CBORTokenTag::DONE, tokenizer.TokenTag());
    [all …]
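The two tests tokenize single CBOR unsigned integers (23 and 42) and expect DONE afterwards. The encoding rule involved, CBOR major type 0, fits in a few lines of Python:

    def decode_cbor_uint(data: bytes) -> int:
        """Decode one CBOR unsigned integer (major type 0)."""
        first = data[0]
        assert first >> 5 == 0, "not an unsigned integer"
        info = first & 0x1F
        if info < 24:
            return info  # values 0..23 live in the initial byte
        n = {24: 1, 25: 2, 26: 4, 27: 8}[info]  # payload width in bytes
        return int.from_bytes(data[1:1 + n], "big")

    assert decode_cbor_uint(bytes([0x17])) == 23        # fits in one byte
    assert decode_cbor_uint(bytes([0x18, 0x2A])) == 42  # needs a second byte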
|
D | encoding.cc |
    926  CBORTokenizer* tokenizer,
    929  CBORTokenizer* tokenizer,
    932  CBORTokenizer* tokenizer,
    935  void ParseUTF16String(CBORTokenizer* tokenizer, StreamingParserHandler* out) {
    937    span<uint8_t> rep = tokenizer->GetString16WireRep();
    941    tokenizer->Next();
    944  bool ParseUTF8String(CBORTokenizer* tokenizer, StreamingParserHandler* out) {
    945    assert(tokenizer->TokenTag() == CBORTokenTag::STRING8);
    946    out->HandleString8(tokenizer->GetString8());
    947    tokenizer->Next();
    [all …]
|
/third_party/node/tools/inspector_protocol/lib/ |
D | Values_cpp.template |
     97  std::unique_ptr<DictionaryValue> parseMap(int32_t stack_depth, cbor::CBORTokenizer* tokenizer);
     98  std::unique_ptr<ListValue> parseArray(int32_t stack_depth, cbor::CBORTokenizer* tokenizer);
     99  std::unique_ptr<Value> parseValue(int32_t stack_depth, cbor::CBORTokenizer* tokenizer);
    104  std::unique_ptr<ListValue> parseArray(int32_t stack_depth, cbor::CBORTokenizer* tokenizer) {
    105    DCHECK(tokenizer->TokenTag() == cbor::CBORTokenTag::ARRAY_START);
    106    tokenizer->Next();
    108    while (tokenizer->TokenTag() != cbor::CBORTokenTag::STOP) {
    110      if (tokenizer->TokenTag() == cbor::CBORTokenTag::DONE) return nullptr;
    111      if (tokenizer->TokenTag() == cbor::CBORTokenTag::ERROR_VALUE) return nullptr;
    113      auto value = parseValue(stack_depth, tokenizer);
    [all …]
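parseValue/parseArray/parseMap are mutually recursive, with stack_depth threaded through so deeply nested input can be rejected. A Python sketch of that shape over a flat token list (token names and the depth cap are illustrative):

    MAX_DEPTH = 300  # illustrative cap, not the template's actual limit

    def parse_value(stack_depth, tokens):
        if stack_depth > MAX_DEPTH:
            return None  # refuse pathological nesting
        if tokens[0] == 'ARRAY_START':
            return parse_array(stack_depth, tokens)
        return tokens.pop(0)  # scalar: consume and return its tag

    def parse_array(stack_depth, tokens):
        assert tokens.pop(0) == 'ARRAY_START'
        out = []
        while tokens[0] != 'STOP':
            if tokens[0] in ('DONE', 'ERROR_VALUE'):
                return None  # truncated or malformed input
            value = parse_value(stack_depth + 1, tokens)
            if value is None:
                return None
            out.append(value)
        tokens.pop(0)  # consume STOP
        return out

    assert parse_array(
        0, ['ARRAY_START', 'INT32', 'ARRAY_START', 'STOP', 'STOP']
    ) == ['INT32', []]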
|
D | encoding_cpp.template |
    934  CBORTokenizer* tokenizer,
    937  CBORTokenizer* tokenizer,
    940  CBORTokenizer* tokenizer,
    943  void ParseUTF16String(CBORTokenizer* tokenizer, StreamingParserHandler* out) {
    945    span<uint8_t> rep = tokenizer->GetString16WireRep();
    949    tokenizer->Next();
    952  bool ParseUTF8String(CBORTokenizer* tokenizer, StreamingParserHandler* out) {
    953    assert(tokenizer->TokenTag() == CBORTokenTag::STRING8);
    954    out->HandleString8(tokenizer->GetString8());
    955    tokenizer->Next();
    [all …]
|
/third_party/freetype/docs/reference/assets/javascripts/lunr/min/ |
D | lunr.th.min.js |
      1  …tokenizer=e.th.tokenizer:(e.tokenizer&&(e.tokenizer=e.th.tokenizer),this.tokenizerFn&&(this.tokeni…
|
D | lunr.zh.min.js |
      1  …,i?this.tokenizer=r.zh.tokenizer:(r.tokenizer&&(r.tokenizer=r.zh.tokenizer),this.tokenizerFn&&(thi…
|
D | lunr.ja.min.js |
      1  …tokenizer=e.ja.tokenizer:(e.tokenizer&&(e.tokenizer=e.ja.tokenizer),this.tokenizerFn&&(this.tokeni…
|
/third_party/parse5/packages/parse5/lib/tokenizer/ |
D | tokenizer-location-info.test.ts |
    168  const tokenizer = new Tokenizer({ sourceCodeLocationInfo: true }, handler);
    171  tokenizer.preprocessor.bufferWaterline = 8;
    172  tokenizer.state = testCase.initialMode;
    173  tokenizer.lastStartTagName = testCase.lastStartTagName;
    174  tokenizer.inForeignNode = !!testCase.inForeignNode;
    177  tokenizer.write(htmlChunks[i], i === htmlChunks.length - 1);
    181  assert.ok(!tokenizer.active);
|
/third_party/parse5/packages/parse5-sax-parser/lib/ |
D | parser-feedback-simulator.ts | in ParserFeedbackSimulator:
     23  public tokenizer: Tokenizer;
     26  this.tokenizer = new Tokenizer(options, this);
     88  this.tokenizer.inForeignNode = this.inForeignContent;
     94  this.tokenizer.inForeignNode = this.inForeignContent;
    102  this.tokenizer.state = TokenizerMode.RCDATA;
    106  this.tokenizer.state = TokenizerMode.PLAINTEXT;
    110  this.tokenizer.state = TokenizerMode.SCRIPT_DATA;
    119  this.tokenizer.state = TokenizerMode.RAWTEXT;
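The simulator switches the tokenizer into the HTML standard's raw-text modes when it sees the corresponding start tags. A sketch of that tag-to-mode mapping (the dict is assembled here for illustration from the HTML spec, not taken from parse5's source):

    # Start tags that move an HTML tokenizer out of the normal data state.
    TAG_TO_MODE = {
        'title': 'RCDATA', 'textarea': 'RCDATA',
        'plaintext': 'PLAINTEXT',
        'script': 'SCRIPT_DATA',
        'style': 'RAWTEXT', 'iframe': 'RAWTEXT', 'xmp': 'RAWTEXT',
        'noembed': 'RAWTEXT', 'noframes': 'RAWTEXT',
    }

    def mode_for_start_tag(tag_name):
        return TAG_TO_MODE.get(tag_name, 'DATA')

    assert mode_for_start_tag('script') == 'SCRIPT_DATA'
    assert mode_for_start_tag('div') == 'DATA'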
|
D | index.ts | in SAXParser:
     48  protected tokenizer: Tokenizer;
     62  this.tokenizer = this.parserFeedbackSimulator.tokenizer;
    118  this.tokenizer.pause();
    124  this.tokenizer.write(chunk, this.lastChunkWritten);
    147  if (this.tokenizer.preprocessor.willDropParsedChunk()) {
|
/third_party/skia/third_party/externals/angle2/src/compiler/preprocessor/ |
D | Preprocessor.cpp | in PreprocessorImpl(), init(), setMaxTokenSize():
     27  Tokenizer tokenizer;
     35  tokenizer(diag),
     36  directiveParser(&tokenizer, &macroSet, diag, directiveHandler, settings),
     63  return mImpl->tokenizer.init(count, string, length);
    102  mImpl->tokenizer.setMaxTokenSize(maxTokenSize);
|
/third_party/skia/third_party/externals/swiftshader/src/OpenGL/compiler/preprocessor/ |
D | Preprocessor.cpp | in PreprocessorImpl(), init(), setMaxTokenSize():
     33  Tokenizer tokenizer;
     41  tokenizer(diag),
     42  …directiveParser(&tokenizer, &macroSet, diag, directiveHandler, settings.maxMacroExpansionDepth),
     70  return mImpl->tokenizer.init(count, string, length);
    109  mImpl->tokenizer.setMaxTokenSize(maxTokenSize);
|
/third_party/flutter/skia/third_party/externals/angle2/src/compiler/preprocessor/ |
D | Preprocessor.cpp | in PreprocessorImpl(), init(), setMaxTokenSize():
     27  Tokenizer tokenizer;
     35  tokenizer(diag),
     36  directiveParser(&tokenizer, &macroSet, diag, directiveHandler, settings),
     63  return mImpl->tokenizer.init(count, string, length);
    102  mImpl->tokenizer.setMaxTokenSize(maxTokenSize);
|
/third_party/python/Tools/peg_generator/pegen/ |
D | __main__.py |
     29  grammar, parser, tokenizer, gen = build_c_parser_and_generator(
     40  return grammar, parser, tokenizer, gen
     58  grammar, parser, tokenizer, gen = build_python_parser_and_generator(
     65  return grammar, parser, tokenizer, gen
    129  grammar, parser, tokenizer, gen = args.func(args)
    165  diag = tokenizer.diagnose()
|
D | build.py |
     16  from pegen.tokenizer import Tokenizer
    109  tokenizer = Tokenizer(tokenize.generate_tokens(file.readline), verbose=verbose_tokenizer)
    110  parser = GrammarParser(tokenizer, verbose=verbose_parser)
    116  return grammar, parser, tokenizer
    215  grammar, parser, tokenizer = build_parser(grammar_file, verbose_tokenizer, verbose_parser)
    227  return grammar, parser, tokenizer, gen
    248  grammar, parser, tokenizer = build_parser(grammar_file, verbose_tokenizer, verbose_parser)
    250  return grammar, parser, tokenizer, gen
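build.py wraps the stdlib tokenize stream in pegen's Tokenizer and hands it to the generated grammar parser. A usage sketch of that wiring (requires the pegen package from CPython's Tools/peg_generator; the grammar text is illustrative):

    import io
    import tokenize

    from pegen.grammar_parser import GeneratedParser as GrammarParser
    from pegen.tokenizer import Tokenizer

    grammar_text = 'start: NEWLINE* ENDMARKER\n'  # illustrative grammar
    tok = Tokenizer(tokenize.generate_tokens(io.StringIO(grammar_text).readline))
    parser = GrammarParser(tok)
    grammar = parser.start()  # a Grammar object, or None on parse failure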
|