
Searched refs:tokenizer (Results 1 – 25 of 30) sorted by relevance


/frameworks/opt/gamesdk/third_party/protobuf-3.0.0/csharp/src/Google.Protobuf.Test/
JsonTokenizerTest.cs
88   var tokenizer = JsonTokenizer.FromTextReader(new StringReader(json)); in ObjectDepth()
90 Assert.AreEqual(0, tokenizer.ObjectDepth); in ObjectDepth()
91 Assert.AreEqual(JsonToken.StartObject, tokenizer.Next()); in ObjectDepth()
92 Assert.AreEqual(1, tokenizer.ObjectDepth); in ObjectDepth()
93 Assert.AreEqual(JsonToken.Name("foo"), tokenizer.Next()); in ObjectDepth()
94 Assert.AreEqual(1, tokenizer.ObjectDepth); in ObjectDepth()
95 Assert.AreEqual(JsonToken.StartObject, tokenizer.Next()); in ObjectDepth()
96 Assert.AreEqual(2, tokenizer.ObjectDepth); in ObjectDepth()
97 Assert.AreEqual(JsonToken.Name("x"), tokenizer.Next()); in ObjectDepth()
98 Assert.AreEqual(2, tokenizer.ObjectDepth); in ObjectDepth()
[all …]
/frameworks/opt/gamesdk/third_party/protobuf-3.0.0/src/google/protobuf/io/
tokenizer_unittest.cc
255  Tokenizer tokenizer(&input, &error_collector); in TEST_2D() local
258 EXPECT_EQ(Tokenizer::TYPE_START, tokenizer.current().type); in TEST_2D()
259 EXPECT_EQ("", tokenizer.current().text); in TEST_2D()
260 EXPECT_EQ(0, tokenizer.current().line); in TEST_2D()
261 EXPECT_EQ(0, tokenizer.current().column); in TEST_2D()
262 EXPECT_EQ(0, tokenizer.current().end_column); in TEST_2D()
265 ASSERT_TRUE(tokenizer.Next()); in TEST_2D()
268 EXPECT_EQ(kSimpleTokenCases_case.type, tokenizer.current().type); in TEST_2D()
270 EXPECT_EQ(kSimpleTokenCases_case.input, tokenizer.current().text); in TEST_2D()
272 EXPECT_EQ(0, tokenizer.current().line); in TEST_2D()
[all …]
/frameworks/opt/gamesdk/third_party/protobuf-3.0.0/python/google/protobuf/
text_format.py
573  tokenizer = Tokenizer(lines)
574 while not tokenizer.AtEnd():
575 self._MergeField(tokenizer, message)
577 def _MergeField(self, tokenizer, message): argument
593 if tokenizer.TryConsume('['):
594 name = [tokenizer.ConsumeIdentifier()]
595 while tokenizer.TryConsume('.'):
596 name.append(tokenizer.ConsumeIdentifier())
600 raise tokenizer.ParseErrorPreviousToken(
610 raise tokenizer.ParseErrorPreviousToken(
[all …]
/frameworks/opt/gamesdk/third_party/protobuf-3.0.0/csharp/src/Google.Protobuf/
JsonParser.cs
74   …{ Timestamp.Descriptor.FullName, (parser, message, tokenizer) => MergeTimestamp(message, tokenizer
75 …{ Duration.Descriptor.FullName, (parser, message, tokenizer) => MergeDuration(message, tokenizer.N…
76 …{ Value.Descriptor.FullName, (parser, message, tokenizer) => parser.MergeStructValue(message, toke…
77 { ListValue.Descriptor.FullName, (parser, message, tokenizer) =>
78 …r.MergeRepeatedField(message, message.Descriptor.Fields[ListValue.ValuesFieldNumber], tokenizer) },
79 …{ Struct.Descriptor.FullName, (parser, message, tokenizer) => parser.MergeStruct(message, tokenize…
80 … { Any.Descriptor.FullName, (parser, message, tokenizer) => parser.MergeAny(message, tokenizer) },
81 …{ FieldMask.Descriptor.FullName, (parser, message, tokenizer) => MergeFieldMask(message, tokenizer
95 …private static void MergeWrapperField(JsonParser parser, IMessage message, JsonTokenizer tokenizer) in MergeWrapperField() argument
97 …geField(message, message.Descriptor.Fields[WrappersReflection.WrapperValueFieldNumber], tokenizer); in MergeWrapperField()
[all …]
/frameworks/opt/gamesdk/third_party/protobuf-3.0.0/python/google/protobuf/internal/
text_format_test.py
1188  tokenizer = text_format.Tokenizer(text.splitlines())
1189 methods = [(tokenizer.ConsumeIdentifier, 'identifier1'), ':',
1190 (tokenizer.ConsumeString, 'string1'),
1191 (tokenizer.ConsumeIdentifier, 'identifier2'), ':',
1192 (tokenizer.ConsumeInteger, 123),
1193 (tokenizer.ConsumeIdentifier, 'identifier3'), ':',
1194 (tokenizer.ConsumeString, 'string'),
1195 (tokenizer.ConsumeIdentifier, 'identifiER_4'), ':',
1196 (tokenizer.ConsumeFloat, 1.1e+2),
1197 (tokenizer.ConsumeIdentifier, 'ID5'), ':',
[all …]
/frameworks/opt/gamesdk/third_party/protobuf-3.0.0/java/core/src/main/java/com/google/protobuf/
TextFormat.java
1416  final Tokenizer tokenizer = new Tokenizer(input); in merge() local
1422 while (!tokenizer.atEnd()) { in merge()
1423 mergeField(tokenizer, extensionRegistry, target, unknownFields); in merge()
1434 private void mergeField(final Tokenizer tokenizer, in mergeField() argument
1439 mergeField(tokenizer, extensionRegistry, target, parseInfoTreeBuilder, in mergeField()
1447 private void mergeField(final Tokenizer tokenizer, in mergeField() argument
1454 int startLine = tokenizer.getLine(); in mergeField()
1455 int startColumn = tokenizer.getColumn(); in mergeField()
1459 if (tokenizer.tryConsume("[")) { in mergeField()
1462 new StringBuilder(tokenizer.consumeIdentifier()); in mergeField()
[all …]
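
The TextFormat.java matches above construct a Tokenizer inside merge(); that Tokenizer is internal to TextFormat, so callers normally drive it through the public TextFormat.merge() entry point. A minimal sketch of that usage follows, assuming a hypothetical protoc-generated type MyMessage:

    import com.google.protobuf.TextFormat;

    // Minimal sketch, not framework code: the Tokenizer matched above is internal to
    // TextFormat, so callers drive it through TextFormat.merge(). "MyMessage" stands
    // in for any protoc-generated message type.
    public class TextFormatMergeSketch {
        public static void main(String[] args) throws TextFormat.ParseException {
            MyMessage.Builder builder = MyMessage.newBuilder();
            // merge() builds a Tokenizer over the input and calls mergeField()
            // until tokenizer.atEnd(), as in the matched lines above.
            TextFormat.merge("id: 123 name: \"bar\"", builder);
            System.out.println(builder.build());
        }
    }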
/frameworks/base/tools/aapt2/util/
Util_test.cpp
45   auto tokenizer = util::Tokenize(StringPiece("this| is|the|end"), '|'); in TEST() local
46 auto iter = tokenizer.begin(); in TEST()
55 ASSERT_THAT(iter, Eq(tokenizer.end())); in TEST()
59 auto tokenizer = util::Tokenize(StringPiece(""), '|'); in TEST() local
60 auto iter = tokenizer.begin(); in TEST()
61 ASSERT_THAT(iter, Ne(tokenizer.end())); in TEST()
64 ASSERT_THAT(iter, Eq(tokenizer.end())); in TEST()
68 auto tokenizer = util::Tokenize(StringPiece("one."), '.'); in TEST() local
69 auto iter = tokenizer.begin(); in TEST()
72 ASSERT_THAT(iter, Ne(tokenizer.end())); in TEST()
/frameworks/native/libs/input/
VirtualKeyMap.cpp
52   std::unique_ptr<Tokenizer> tokenizer(t); in load() local
60 Parser parser(map.get(), tokenizer.get()); in load()
72 VirtualKeyMap::Parser::Parser(VirtualKeyMap* map, Tokenizer* tokenizer) : in Parser() argument
73 mMap(map), mTokenizer(tokenizer) { in Parser()
KeyLayoutMap.cpp
55   Tokenizer* tokenizer; in load() local
56 status_t status = Tokenizer::open(String8(filename.c_str()), &tokenizer); in load()
68 Parser parser(map.get(), tokenizer); in load()
73 tokenizer->getFilename().string(), tokenizer->getLineNumber(), in load()
80 delete tokenizer; in load()
192 KeyLayoutMap::Parser::Parser(KeyLayoutMap* map, Tokenizer* tokenizer) : in Parser() argument
193 mMap(map), mTokenizer(tokenizer) { in Parser()
KeyCharacterMap.cpp
113  Tokenizer* tokenizer; in load() local
114 status_t status = Tokenizer::open(String8(filename.c_str()), &tokenizer); in load()
118 status = load(tokenizer, format, outMap); in load()
119 delete tokenizer; in load()
128 Tokenizer* tokenizer; in loadContents() local
129 status_t status = Tokenizer::fromContents(String8(filename.c_str()), contents, &tokenizer); in loadContents()
133 status = load(tokenizer, format, outMap); in loadContents()
134 delete tokenizer; in loadContents()
139 status_t KeyCharacterMap::load(Tokenizer* tokenizer, in load() argument
150 Parser parser(map.get(), tokenizer, format); in load()
[all …]
/frameworks/native/include/input/
VirtualKeyMap.h
65   Parser(VirtualKeyMap* map, Tokenizer* tokenizer);
KeyCharacterMap.h
212  Parser(KeyCharacterMap* map, Tokenizer* tokenizer, Format format);
245 static status_t load(Tokenizer* tokenizer, Format format, sp<KeyCharacterMap>* outMap);
KeyLayoutMap.h
104  Parser(KeyLayoutMap* map, Tokenizer* tokenizer);
/frameworks/base/core/java/android/net/
UrlQuerySanitizer.java
621  StringTokenizer tokenizer = new StringTokenizer(query, "&"); in parseQuery() local
622 while(tokenizer.hasMoreElements()) { in parseQuery()
623 String attributeValuePair = tokenizer.nextToken(); in parseQuery()
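
The parseQuery() match splits the query string on '&' with java.util.StringTokenizer. A small sketch of the same pattern follows; the additional split of each pair on '=' is illustrative and not taken from UrlQuerySanitizer:

    import java.util.StringTokenizer;

    // Sketch of the StringTokenizer pattern used in parseQuery() above; the extra
    // split of each pair on '=' is illustrative, not copied from UrlQuerySanitizer.
    public class QueryTokenizeSketch {
        public static void main(String[] args) {
            String query = "user=alice&lang=en&debug";
            StringTokenizer tokenizer = new StringTokenizer(query, "&");
            while (tokenizer.hasMoreElements()) {
                String attributeValuePair = tokenizer.nextToken();
                int eq = attributeValuePair.indexOf('=');
                String name = eq < 0 ? attributeValuePair : attributeValuePair.substring(0, eq);
                String value = eq < 0 ? "" : attributeValuePair.substring(eq + 1);
                System.out.println(name + " -> " + value);
            }
        }
    }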
/frameworks/multidex/library/src/androidx/multidex/
MultiDex.java
336  StringTokenizer tokenizer = new StringTokenizer(versionString, "."); in isVMMultidexCapable() local
337 String majorToken = tokenizer.hasMoreTokens() ? tokenizer.nextToken() : null; in isVMMultidexCapable()
338 String minorToken = tokenizer.hasMoreTokens() ? tokenizer.nextToken() : null; in isVMMultidexCapable()
/frameworks/base/core/java/android/util/jar/
StrictJarVerifier.java
399  StringTokenizer tokenizer = new StringTokenizer(apkSignatureSchemeIdList, ","); in verifyCertificate() local
400 while (tokenizer.hasMoreTokens()) { in verifyCertificate()
401 String idText = tokenizer.nextToken().trim(); in verifyCertificate()
/frameworks/native/cmds/atrace/
atrace.cpp
827  Tokenizer* tokenizer = nullptr; in setCategoriesEnableFromFile() local
828 if (Tokenizer::open(String8(categories_file), &tokenizer) != NO_ERROR) { in setCategoriesEnableFromFile()
832 while (!tokenizer->isEol()) { in setCategoriesEnableFromFile()
833 String8 token = tokenizer->nextToken(" "); in setCategoriesEnableFromFile()
835 tokenizer->skipDelimiters(" "); in setCategoriesEnableFromFile()
840 delete tokenizer; in setCategoriesEnableFromFile()
/frameworks/base/core/tests/coretests/src/android/text/
TextUtilsTest.java
270  Rfc822Tokenizer tokenizer = new Rfc822Tokenizer(); in testRfc822FindToken() local
274 assertEquals(0, tokenizer.findTokenStart(address, 21)); in testRfc822FindToken()
275 assertEquals(22, tokenizer.findTokenEnd(address, 21)); in testRfc822FindToken()
276 assertEquals(24, tokenizer.findTokenStart(address, 25)); in testRfc822FindToken()
277 assertEquals(46, tokenizer.findTokenEnd(address, 25)); in testRfc822FindToken()
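
The testRfc822FindToken() match exercises Rfc822Tokenizer.findTokenStart()/findTokenEnd(). A sketch of those calls, plus the static Rfc822Tokenizer.tokenize(), follows; it assumes the Android framework classes are available and uses an illustrative address list:

    import android.text.util.Rfc822Token;
    import android.text.util.Rfc822Tokenizer;

    // Sketch of the Rfc822Tokenizer calls exercised in testRfc822FindToken(); it
    // needs the Android framework on the classpath, and the address list is illustrative.
    public class Rfc822Sketch {
        static void dump(String addresses) {
            Rfc822Tokenizer tokenizer = new Rfc822Tokenizer();
            // findTokenStart()/findTokenEnd() locate the token around a cursor offset.
            int start = tokenizer.findTokenStart(addresses, addresses.length());
            int end = tokenizer.findTokenEnd(addresses, start);
            System.out.println("last token: " + addresses.substring(start, end));
            // tokenize() splits the whole list into name/address pairs.
            for (Rfc822Token token : Rfc822Tokenizer.tokenize(addresses)) {
                System.out.println(token.getName() + " <" + token.getAddress() + ">");
            }
        }
    }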
/frameworks/opt/gamesdk/third_party/protobuf-3.0.0/src/google/protobuf/compiler/
importer.cc
139  io::Tokenizer tokenizer(input.get(), &file_error_collector); in FindFileByName() local
151 return parser.Parse(&tokenizer, output) && in FindFileByName()
/frameworks/opt/gamesdk/third_party/protobuf-3.0.0/cmake/
libprotobuf.cmake
19   ${protobuf_source_dir}/src/google/protobuf/io/tokenizer.cc
extract_includes.bat.in
55   …OTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\io\tokenizer.h include\google\protobuf\io\tokeniz…
/frameworks/ex/camera2/portability/src/com/android/ex/camera2/portability/
AndroidCameraAgentImpl.java
1039  StringTokenizer tokenizer = new StringTokenizer(flattened, ";"); in dumpDeviceSettings() local
1041 while (tokenizer.hasMoreElements()) { in dumpDeviceSettings()
1042 dumpedSettings += tokenizer.nextToken() + '\n'; in dumpDeviceSettings()
/frameworks/opt/gamesdk/src/protobuf/
protobuf.cmake
154  ${GP_SRC_DIR}/io/tokenizer.cc
/frameworks/opt/gamesdk/third_party/protobuf-3.0.0/src/
Makefile.am
142  google/protobuf/io/tokenizer.h \
250 google/protobuf/io/tokenizer.cc \
/frameworks/opt/gamesdk/third_party/protobuf-3.0.0/
BUILD
132  "src/google/protobuf/io/tokenizer.cc",
