| /third_party/gn/src/gn/ |
| D | tokenizer_unittest.cc | 40 TEST(Tokenizer, Empty) { in TEST() argument
|   |                       | 55 TEST(Tokenizer, Identifier) { in TEST() argument
|   |                       | 60 TEST(Tokenizer, Integer) { in TEST() argument
|   |                       | 66 TEST(Tokenizer, IntegerNoSpace) { in TEST() argument
|   |                       | 72 TEST(Tokenizer, String) { in TEST() argument
|   |                       | 80 TEST(Tokenizer, Operator) { in TEST() argument
|   |                       | 103 TEST(Tokenizer, Scoper) { in TEST() argument
|   |                       | 112 TEST(Tokenizer, FunctionCall) { in TEST() argument
|   |                       | 123 TEST(Tokenizer, Locations) { in TEST() argument
|   |                       | 136 TEST(Tokenizer, ByteOffsetOfNthLine) { in TEST() argument
|   |                       | [all …]
|
| D | tokenizer.cc | 71 Tokenizer::Tokenizer(const InputFile* input_file, in Tokenizer() function in Tokenizer
|
| /third_party/parse5/packages/parse5-sax-parser/test/ |
| D | parser-feedback-simulator.test.js | 2 const Tokenizer = require('parse5/lib/tokenizer'); constant
|
| /third_party/parse5/packages/parse5/test/ |
| D | tokenizer.test.js | 4 const Tokenizer = require('../lib/tokenizer'); constant
|
| D | location-info-tokenizer.test.js | 4 const Tokenizer = require('../lib/tokenizer'); constant
|
| /third_party/parse5/packages/parse5-sax-parser/lib/ |
| D | parser-feedback-simulator.js | 3 const Tokenizer = require('parse5/lib/tokenizer'); constant
|
| D | index.js | 4 const Tokenizer = require('parse5/lib/tokenizer'); constant
|
| /third_party/parse5/scripts/generate-parser-feedback-test/ |
| D | index.js | 5 const Tokenizer = require('../../packages/parse5/lib/tokenizer'); constant
|
| /third_party/parse5/packages/parse5/lib/extensions/location-info/ |
| D | parser-mixin.js | 4 const Tokenizer = require('../../tokenizer'); constant
|
| D | tokenizer-mixin.js | 4 const Tokenizer = require('../../tokenizer'); constant
|
| /third_party/python/Tools/peg_generator/pegen/ |
| D | tokenizer.py | 14 class Tokenizer: class
|
| D | parser.py | 161 def __init__(self, tokenizer: Tokenizer, *, verbose: bool = False):
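The two pegen hits above are related: pegen/tokenizer.py defines the Tokenizer class, and the Parser constructor in pegen/parser.py (line 161 above) takes one as its first argument. Below is a minimal sketch of how the pair is typically wired together, assuming CPython's Tools/peg_generator layout in which the Tokenizer wraps a token stream from the standard tokenize module; the getnext() call and the GeneratedParser name are illustrative assumptions, not confirmed by this listing.

```python
# Sketch only: assumes pegen from CPython's Tools/peg_generator is importable and
# that Tokenizer wraps a tokenize token generator (method names are assumptions).
import io
import tokenize

from pegen.tokenizer import Tokenizer  # class Tokenizer (tokenizer.py, line 14 above)

source = "x = 1 + 2\n"
tokengen = tokenize.generate_tokens(io.StringIO(source).readline)
tok = Tokenizer(tokengen, verbose=False)

# A generated parser (parser.py, line 161 above) would consume this Tokenizer:
#   parser = GeneratedParser(tok, verbose=False)   # GeneratedParser is hypothetical
#   tree = parser.start()
# Here we simply drain the token stream to show what the parser sees.
while True:
    t = tok.getnext()                        # assumed consume-next API
    print(tokenize.tok_name[t.type], repr(t.string))
    if t.type == tokenize.ENDMARKER:
        break
```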
|
| /third_party/python/Doc/library/ |
| D | re.rst | 1705 .. [Frie09] Friedl, Jeffrey. Mastering Regular Expressions. 3rd ed., O'Reilly
|
| /third_party/skia/third_party/externals/angle2/src/compiler/preprocessor/ |
| D | DirectiveParser.h | 23 class Tokenizer; variable
|
| /third_party/flutter/skia/third_party/externals/angle2/src/compiler/preprocessor/ |
| D | DirectiveParser.h | 23 class Tokenizer; variable
|
| /third_party/skia/third_party/externals/swiftshader/src/OpenGL/compiler/preprocessor/ |
| D | DirectiveParser.h | 28 class Tokenizer; variable
|
| /third_party/parse5/test/utils/ |
| D | generate-tokenization-tests.js | 6 const Tokenizer = require('../../packages/parse5/lib/tokenizer'); constant
|
| /third_party/vk-gl-cts/framework/xexml/ |
| D | xeXMLParser.cpp | 57 Tokenizer::Tokenizer (void) in Tokenizer() function in xe::xml::Tokenizer
|
| D | xeXMLParser.hpp | 83 class Tokenizer class
|
| /third_party/protobuf/src/google/protobuf/io/ |
| D | tokenizer.h | 56 class Tokenizer; variable
|
| D | tokenizer.cc | 195 Tokenizer::Tokenizer(ZeroCopyInputStream* input, in Tokenizer() function in google::protobuf::io::Tokenizer
|
| /third_party/python/Lib/ |
| D | sre_parse.py | 223 class Tokenizer: class
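The Tokenizer in Lib/sre_parse.py (class at line 223 above) is the internal lexer behind the re module's pattern compiler; it is not public API, and on Python 3.11+ the module is deprecated (the code later moved under re._parser). A small, hedged sketch of exercising it via the module-level parse() helper, which drives the Tokenizer internally; the stand-alone attribute and method names shown are internal details and may differ between CPython versions.

```python
# Sketch only: sre_parse is a private CPython module; names may change across versions.
import sre_parse

# parse() runs the Tokenizer over a pattern and returns a SubPattern tree.
tree = sre_parse.parse(r"(\d+)-(\d+)")
tree.dump()                  # prints the parsed node structure

# Using the Tokenizer (class at line 223 above) directly, purely for illustration:
tok = sre_parse.Tokenizer(r"a|b")
print(tok.next)              # lookahead character: 'a'   (internal attribute)
print(tok.get())             # consume and return it
print(tok.match("|"))        # True: the '|' operator is consumed
```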
|
| /third_party/parse5/packages/parse5/lib/common/ |
| D | foreign-content.js | 3 const Tokenizer = require('../tokenizer'); constant
|
| /third_party/protobuf/java/core/src/main/java/com/google/protobuf/ |
| D | TextFormat.java | 900 private static final class Tokenizer { class in TextFormat
|   |                 | 935 private Tokenizer(final CharSequence text) { in Tokenizer() method in TextFormat.Tokenizer
|
| /third_party/skia/third_party/externals/swiftshader/third_party/llvm-subzero/lib/Support/ |
| D | CommandLine.cpp | 892 TokenizerCallback Tokenizer, in ExpandResponseFile()
|   |                 | 948 bool cl::ExpandResponseFiles(StringSaver &Saver, TokenizerCallback Tokenizer, in ExpandResponseFiles()
|