/external/parameter-framework/upstream/test/tokenizer/Test.cpp
    49    Tokenizer tokenizer("a bcd ef");  variable
    58    Tokenizer tokenizer("");  variable
    67    Tokenizer tokenizer("  a \n\t bc  ");  variable
    77    Tokenizer tokenizer("/a/bcd/ef g/h/", "/");  variable
    88    Tokenizer tokenizer("", Tokenizer::defaultDelimiters, false);  variable
    97    Tokenizer tokenizer(",", ",", false);  variable
    106   Tokenizer tokenizer(" a  b \nc d ", Tokenizer::defaultDelimiters, false);  variable

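The constructor calls above cover the default-delimiter, custom-delimiter, and explicit third-argument forms of this utility Tokenizer. A minimal usage sketch of those same forms; the split() accessor, the header path, and the "keep empty tokens" semantics of the third argument are assumptions inferred from the test snippets, not confirmed by them:

```cpp
// Hypothetical sketch of the Tokenizer forms exercised in Test.cpp above.
// split(), the header path, and the third-argument semantics are assumptions.
#include <string>
#include <vector>

#include "Tokenizer.h"

int main() {
    // Default (whitespace) delimiters: "a bcd ef" should yield three tokens.
    Tokenizer whitespace("a bcd ef");
    std::vector<std::string> words = whitespace.split();

    // Custom delimiter set: split a path-like string on '/'.
    Tokenizer path("/a/bcd/ef g/h/", "/");
    std::vector<std::string> segments = path.split();

    // Third argument false (as on lines 88-106 above): presumably keeps empty
    // tokens between consecutive delimiters instead of merging them.
    Tokenizer keepEmpties(",", ",", false);
    std::vector<std::string> fields = keepEmpties.split();

    return 0;
}
```
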
/external/libtextclassifier/native/utils/bert_tokenizer_test.cc
    32    void AssertTokenizerResults(std::unique_ptr<BertTokenizer> tokenizer) {  in AssertTokenizerResults()
    41    auto tokenizer =  in TEST()  local
    48    auto tokenizer =  in TEST()  local
    60    auto tokenizer = std::make_unique<BertTokenizer>(vocab);  in TEST()  local
    66    auto tokenizer =  in TEST()  local
    76    auto tokenizer =  in TEST()  local
    89    auto tokenizer =  in TEST()  local
    110   auto tokenizer = std::make_unique<BertTokenizer>(vocab);  in TEST()  local
    124   auto tokenizer = std::make_unique<BertTokenizer>(vocab);  in TEST()  local
    145   auto tokenizer = std::make_unique<BertTokenizer>(vocab);  in TEST()  local
    [all …]

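Most hits here build a BertTokenizer from an in-memory vocabulary and feed it short strings. A sketch of that pattern; the vocab-vector constructor, Tokenize(), the subwords field, the namespace, and the header path are assumptions drawn from the snippets rather than from bert_tokenizer.h:

```cpp
// Sketch of the test pattern above (libtextclassifier's BertTokenizer).
// Constructor, Tokenize(), subwords, namespace, and header path are assumed.
#include <memory>
#include <string>
#include <vector>

#include "utils/bert_tokenizer.h"  // header path assumed

void TokenizeExample() {
  // A tiny WordPiece vocabulary; the real tests load a much larger one.
  std::vector<std::string> vocab = {"[UNK]", "hell", "##o", "wor", "##ld"};
  auto tokenizer = std::make_unique<libtextclassifier3::BertTokenizer>(vocab);

  // WordPiece splits each whitespace token into known subwords, marking
  // continuations with "##" and unknown pieces as [UNK].
  auto result = tokenizer->Tokenize("hello world");
  for (const std::string& subword : result.subwords) {
    (void)subword;  // e.g. "hell", "##o", "wor", "##ld"
  }
}
```
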
/external/libtextclassifier/native/utils/tokenizer_test.cc
    126   TestingTokenizerProxy tokenizer(TokenizationType_INTERNAL_TOKENIZER, configs,  in TEST()  local
    173   TestingTokenizerProxy tokenizer(TokenizationType_INTERNAL_TOKENIZER, configs,  in TEST()  local
    208   TestingTokenizerProxy tokenizer(TokenizationType_INTERNAL_TOKENIZER, configs,  in TEST()  local
    344   TestingTokenizerProxy tokenizer(TokenizationType_INTERNAL_TOKENIZER, configs,  in TEST()  local
    375   TestingTokenizerProxy tokenizer(TokenizationType_ICU, {}, {},  in TEST()  local
    395   TestingTokenizerProxy tokenizer(TokenizationType_ICU, {}, {},  in TEST()  local
    423   TestingTokenizerProxy tokenizer(TokenizationType_ICU, {}, {},  in TEST()  local
    441   TestingTokenizerProxy tokenizer(TokenizationType_ICU, {}, {},  in TEST()  local
    489   TestingTokenizerProxy tokenizer(TokenizationType_MIXED, configs,  in TEST()  local
    520   TestingTokenizerProxy tokenizer(TokenizationType_INTERNAL_TOKENIZER,  in TEST()  local
    [all …]

/external/ktfmt/core/src/test/java/com/facebook/ktfmt/format/TokenizerTest.kt
    37    val tokenizer = Tokenizer(code, file)  in PsiWhiteSpace are split to newlines and maximal-length whitespaces()  constant
    63    val tokenizer = Tokenizer(code, file)  in Strings are returns as a single token()  constant
    103   val tokenizer = Tokenizer(code, file)  in Token index is advanced after a string token()  constant

/external/cronet/net/cert/pem_unittest.cc
    20    PEMTokenizer tokenizer(string_piece, accepted_types);  in TEST()  local
    38    PEMTokenizer tokenizer(string_piece, accepted_types);  in TEST()  local
    56    PEMTokenizer tokenizer(string_piece, accepted_types);  in TEST()  local
    73    PEMTokenizer tokenizer(string_piece, accepted_types);  in TEST()  local
    98    PEMTokenizer tokenizer(string_piece, accepted_types);  in TEST()  local
    122   PEMTokenizer tokenizer(string_piece, accepted_types);  in TEST()  local
    139   PEMTokenizer tokenizer(string_piece, accepted_types);  in TEST()  local
    160   PEMTokenizer tokenizer(string_piece, accepted_types);  in TEST()  local

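Every test here drives the same two-argument constructor: the PEM input plus the list of accepted block types. A minimal sketch of the decode loop these tests assert against; GetNext(), block_type(), data(), and the string_view parameter type are assumptions about net::PEMTokenizer's shape and may differ between Chromium revisions:

```cpp
// Sketch of the loop pem_unittest.cc exercises; accessor names are assumed.
#include <string>
#include <string_view>
#include <vector>

#include "net/cert/pem.h"

void CollectCertificates(std::string_view pem_input,
                         std::vector<std::string>* der_out) {
  // Only blocks framed as "-----BEGIN CERTIFICATE-----" are surfaced.
  const std::vector<std::string> accepted_types = {"CERTIFICATE"};
  net::PEMTokenizer tokenizer(pem_input, accepted_types);

  // Each GetNext() advances to the next accepted block and exposes its
  // base64-decoded payload via data().
  while (tokenizer.GetNext()) {
    der_out->push_back(tokenizer.data());
  }
}
```
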
/external/tflite-support/tensorflow_lite_support/cc/test/text/regex_tokenizer_test.cc
    26    namespace tokenizer {  namespace
    46    auto tokenizer =  in TEST()  local
    57    auto tokenizer =  in TEST()  local
    67    auto tokenizer =  in TEST()  local
    82    auto tokenizer =  in TEST()  local
    101   auto tokenizer =  in TEST()  local
    120   auto tokenizer =  in TEST()  local

/external/perfetto/src/protozero/filtering/message_tokenizer_unittest.cc
    70    MessageTokenizer tokenizer;  in TEST()  local
    123   MessageTokenizer tokenizer;  in TEST()  local
    147   MessageTokenizer tokenizer;  in TEST()  local
    157   MessageTokenizer tokenizer;  in TEST()  local
    169   MessageTokenizer tokenizer;  in TEST()  local

/external/protobuf/csharp/src/Google.Protobuf/JsonParser.cs
    97    …private static void MergeWrapperField(JsonParser parser, IMessage message, JsonTokenizer tokenizer)  in MergeWrapperField()
    151   private void Merge(IMessage message, JsonTokenizer tokenizer)  in Merge()
    220   private void MergeField(IMessage message, FieldDescriptor field, JsonTokenizer tokenizer)  in MergeField()
    253   …private void MergeRepeatedField(IMessage message, FieldDescriptor field, JsonTokenizer tokenizer)  in MergeRepeatedField()
    279   private void MergeMapField(IMessage message, FieldDescriptor field, JsonTokenizer tokenizer)  in MergeMapField()
    326   private object ParseSingleValue(FieldDescriptor field, JsonTokenizer tokenizer)  in ParseSingleValue()
    445   private void MergeStructValue(IMessage message, JsonTokenizer tokenizer)  in MergeStructValue()
    487   private void MergeStruct(IMessage message, JsonTokenizer tokenizer)  in MergeStruct()
    500   private void MergeAny(IMessage message, JsonTokenizer tokenizer)  in MergeAny()
    565   private void MergeWellKnownTypeAnyBody(IMessage body, JsonTokenizer tokenizer)  in MergeWellKnownTypeAnyBody()

/external/cronet/third_party/protobuf/csharp/src/Google.Protobuf/JsonParser.cs
    97    …private static void MergeWrapperField(JsonParser parser, IMessage message, JsonTokenizer tokenizer)  in MergeWrapperField()
    151   private void Merge(IMessage message, JsonTokenizer tokenizer)  in Merge()
    220   private void MergeField(IMessage message, FieldDescriptor field, JsonTokenizer tokenizer)  in MergeField()
    253   …private void MergeRepeatedField(IMessage message, FieldDescriptor field, JsonTokenizer tokenizer)  in MergeRepeatedField()
    279   private void MergeMapField(IMessage message, FieldDescriptor field, JsonTokenizer tokenizer)  in MergeMapField()
    326   private object ParseSingleValue(FieldDescriptor field, JsonTokenizer tokenizer)  in ParseSingleValue()
    445   private void MergeStructValue(IMessage message, JsonTokenizer tokenizer)  in MergeStructValue()
    487   private void MergeStruct(IMessage message, JsonTokenizer tokenizer)  in MergeStruct()
    500   private void MergeAny(IMessage message, JsonTokenizer tokenizer)  in MergeAny()
    565   private void MergeWellKnownTypeAnyBody(IMessage body, JsonTokenizer tokenizer)  in MergeWellKnownTypeAnyBody()

/external/doclava/src/com/google/doclava/apicheck/ApiFile.java
    70    final Tokenizer tokenizer = new Tokenizer(filename, (new String(buf, 0, size)).toCharArray());  in parseApi()  local
    91    private static void parsePackage(ApiInfo api, Tokenizer tokenizer)  in parsePackage()
    116   private static void parseClass(ApiInfo api, PackageInfo pkg, Tokenizer tokenizer, String token)  in parseClass()
    232   private static void parseConstructor(Tokenizer tokenizer, ClassInfo cl, String token)  in parseConstructor()
    287   private static void parseMethod(Tokenizer tokenizer, ClassInfo cl, String token)  in parseMethod()
    378   private static void parseField(Tokenizer tokenizer, ClassInfo cl, String token, boolean isEnum)  in parseField()
    507   private static void parseTypeParameterList(Tokenizer tokenizer,  in parseTypeParameterList()
    546   private static void parseParameterList(Tokenizer tokenizer, AbstractMethodInfo method,  in parseParameterList()
    583   private static String parseThrows(Tokenizer tokenizer, AbstractMethodInfo method)  in parseThrows()
    615   public static void assertIdent(Tokenizer tokenizer, String token) throws ApiParseException {  in assertIdent()

/external/protobuf/python/google/protobuf/text_format.py
    866    def _MergeField(self, tokenizer, message):  argument
    1009   def _ConsumeAnyTypeUrl(self, tokenizer):  argument
    1024   def _MergeMessageField(self, tokenizer, message, field):  argument
    1088   def _MergeScalarField(self, tokenizer, message, field):  argument
    1164   def _SkipFieldContents(tokenizer):  argument
    1186   def _SkipField(tokenizer):  argument
    1209   def _SkipFieldMessage(tokenizer):  argument
    1228   def _SkipFieldValue(tokenizer):  argument
    1250   def _SkipRepeatedFieldValue(tokenizer):  argument
    1608   def _ConsumeInt32(tokenizer):  argument
    [all …]

/external/cronet/third_party/protobuf/src/google/protobuf/io/tokenizer_unittest.cc
    253    Tokenizer tokenizer(&input, &error_collector);  in TEST_2D()  local
    297    Tokenizer tokenizer(&input, &error_collector);  in TEST_1D()  local
    338    Tokenizer tokenizer(&input, &error_collector);  in TEST_2D()  local
    347    Tokenizer tokenizer(&input, &error_collector);  in TEST_2D()  local
    472    Tokenizer tokenizer(&input, &error_collector);  in TEST_2D()  local
    545    Tokenizer tokenizer(&input, &error_collector);  in TEST_2D()  local
    612    Tokenizer tokenizer(&input, &error_collector);  in TEST_1D()  local
    772    Tokenizer tokenizer(&input, &error_collector);  in TEST_2D()  local
    1034   Tokenizer tokenizer(&input, &error_collector);  in TEST_2D()  local
    1060   Tokenizer tokenizer(&input, &error_collector);  in TEST_1D()  local

/external/cronet/third_party/protobuf/python/google/protobuf/text_format.py
    839    def _MergeField(self, tokenizer, message):  argument
    979    def _ConsumeAnyTypeUrl(self, tokenizer):  argument
    994    def _MergeMessageField(self, tokenizer, message, field):  argument
    1058   def _MergeScalarField(self, tokenizer, message, field):  argument
    1134   def _SkipFieldContents(tokenizer):  argument
    1153   def _SkipField(tokenizer):  argument
    1176   def _SkipFieldMessage(tokenizer):  argument
    1195   def _SkipFieldValue(tokenizer):  argument
    1561   def _ConsumeInt32(tokenizer):  argument
    1576   def _ConsumeUint32(tokenizer):  argument
    [all …]

/external/antlr/runtime/CSharp2/Sources/Antlr3.Runtime/Antlr.Runtime.Tree/TreePatternParser.cs
    37    protected TreePatternLexer tokenizer;  field in Antlr.Runtime.Tree.TreePatternParser
    42    …public TreePatternParser(TreePatternLexer tokenizer, TreeWizard wizard, ITreeAdaptor adaptor) {  in TreePatternParser()

/external/protobuf/src/google/protobuf/io/tokenizer_unittest.cc
    254    Tokenizer tokenizer(&input, &error_collector);  in TEST_2D()  local
    298    Tokenizer tokenizer(&input, &error_collector);  in TEST_1D()  local
    339    Tokenizer tokenizer(&input, &error_collector);  in TEST_2D()  local
    348    Tokenizer tokenizer(&input, &error_collector);  in TEST_2D()  local
    473    Tokenizer tokenizer(&input, &error_collector);  in TEST_2D()  local
    546    Tokenizer tokenizer(&input, &error_collector);  in TEST_2D()  local
    613    Tokenizer tokenizer(&input, &error_collector);  in TEST_1D()  local
    773    Tokenizer tokenizer(&input, &error_collector);  in TEST_2D()  local
    1136   Tokenizer tokenizer(&input, &error_collector);  in TEST_2D()  local
    1162   Tokenizer tokenizer(&input, &error_collector);  in TEST_1D()  local

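Both copies of this unit test (here and under /external/cronet above) repeat the same setup: wrap the text in a ZeroCopyInputStream, attach an ErrorCollector, and pull tokens with Next(). A minimal sketch of that setup; the AddError() override matches older protobuf releases (newer ones renamed it RecordError()), so the exact virtual is an assumption about the vendored version:

```cpp
// Sketch of the setup tokenizer_unittest.cc repeats for google::protobuf::io::Tokenizer.
#include <iostream>
#include <string>

#include <google/protobuf/io/tokenizer.h>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>

using google::protobuf::io::ArrayInputStream;
using google::protobuf::io::ErrorCollector;
using google::protobuf::io::Tokenizer;

// Collect errors by printing them; the AddError signature is the older one.
class PrintErrorCollector : public ErrorCollector {
 public:
  void AddError(int line, google::protobuf::io::ColumnNumber column,
                const std::string& message) override {
    std::cerr << "error at " << line << ":" << column << ": " << message << "\n";
  }
};

int main() {
  const std::string text = "message Foo { optional int32 bar = 1; }";
  ArrayInputStream input(text.data(), static_cast<int>(text.size()));
  PrintErrorCollector error_collector;
  Tokenizer tokenizer(&input, &error_collector);

  // current() holds the token just read: its type, raw text, and position.
  while (tokenizer.Next()) {
    std::cout << tokenizer.current().text << "\n";
  }
  return 0;
}
```
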
/external/apache-commons-lang/src/test/java/org/apache/commons/lang3/text/StrTokenizerTest.java
    46    private void checkClone(final StrTokenizer tokenizer) {  in checkClone()
    47    assertNotSame(StrTokenizer.getCSVInstance(), tokenizer);  in checkClone()  local
    48    assertNotSame(StrTokenizer.getTSVInstance(), tokenizer);  in checkClone()  local
    534   void testEmpty(final StrTokenizer tokenizer) {  in testEmpty()
    591   final StrTokenizer tokenizer = new StrTokenizer((char[]) null);  in testCloneNull()  local
    606   final StrTokenizer tokenizer = new StrTokenizer(input);  in testCloneReset()  local
    761   void testXSVAbc(final StrTokenizer tokenizer) {  in testXSVAbc()

/external/tflite-support/tensorflow_lite_support/cc/text/tokenizers/tokenizer_jni_lib.cc
    34    Tokenizer* tokenizer = reinterpret_cast<Tokenizer*>(handle);  in nativeTokenize()  local
    65    Tokenizer* tokenizer = reinterpret_cast<Tokenizer*>(handle);  in nativeConvertTokensToIds()  local

/external/antlr/runtime/Java/src/main/java/org/antlr/runtime/tree/TreePatternParser.java
    34    protected TreePatternLexer tokenizer;  field in TreePatternParser
    39    public TreePatternParser(TreePatternLexer tokenizer, TreeWizard wizard, TreeAdaptor adaptor) {  in TreePatternParser()

/external/deqp-deps/amber/src/vkscript/parser.cc
    43    std::string Parser::make_error(const Tokenizer& tokenizer,  in make_error()
    152   Tokenizer tokenizer(section.contents);  in ProcessRequireBlock()  local
    278   Tokenizer tokenizer(section.contents);  in ProcessIndicesBlock()  local
    321   Tokenizer tokenizer(section.contents);  in ProcessVertexDataBlock()  local

/external/deqp/framework/opengl/gluVarTypeUtil.cpp
    92    VarTokenizer tokenizer(nameWithPath);  in parseVariableName()  local
    99    VarTokenizer tokenizer(nameWithPath);  in parseTypePath()  local

/external/cldr/tools/cldr-code/src/main/java/org/unicode/cldr/util/XPathTokenizer.java
    134   XPathTokenizer tokenizer = new XPathTokenizer(xpath);  in relativeToAbsolute()  local
    156   XPathTokenizer tokenizer = new XPathTokenizer(xpath);  in relativeToAbsolute()  local

/external/pigweed/pw_tokenizer/encode_args_test.cc
    20    namespace tokenizer {  namespace

/external/libtextclassifier/native/actions/feature-processor.h
    61    const Tokenizer* tokenizer() const { return tokenizer_.get(); }  in tokenizer()  function

/external/antlr/runtime/ObjC/Framework/TreePatternParser.h
    39    TreePatternLexer *tokenizer;  variable

/external/cronet/net/http/http_auth_challenge_tokenizer_fuzzer.cc
    12    net::HttpAuthChallengeTokenizer tokenizer(input.begin(), input.end());  in LLVMFuzzerTestOneInput()  local

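The single hit here is essentially the whole harness: a libFuzzer entry point that feeds arbitrary bytes into the challenge tokenizer. A sketch of that shape; only the constructor call is taken from the listing, while param_pairs() and its GetNext() drain loop are assumptions about the class's API:

```cpp
// Shape of a libFuzzer harness around net::HttpAuthChallengeTokenizer, as
// suggested by the hit above. param_pairs()/GetNext() are assumed API.
#include <cstddef>
#include <cstdint>
#include <string>

#include "net/http/http_auth_challenge_tokenizer.h"
#include "net/http/http_util.h"

extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
  std::string input(reinterpret_cast<const char*>(data), size);

  // Parse "Scheme key1=value1, key2=value2" style challenge strings.
  net::HttpAuthChallengeTokenizer tokenizer(input.begin(), input.end());

  // Drain the parameter iterator so the parsing paths are actually exercised.
  net::HttpUtil::NameValuePairsIterator params = tokenizer.param_pairs();
  while (params.GetNext()) {
  }
  return 0;
}
```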