/third_party/protobuf/src/google/protobuf/io/
tokenizer_unittest.cc
     183  EXPECT_TRUE(Tokenizer::ParseInteger(text, kuint64max, &result));  in ParseInteger()
     198  Tokenizer::TokenType type;
     208  {"hello", Tokenizer::TYPE_IDENTIFIER},
     211  {"123", Tokenizer::TYPE_INTEGER},
     212  {"0xab6", Tokenizer::TYPE_INTEGER},
     213  {"0XAB6", Tokenizer::TYPE_INTEGER},
     214  {"0X1234567", Tokenizer::TYPE_INTEGER},
     215  {"0x89abcdef", Tokenizer::TYPE_INTEGER},
     216  {"0x89ABCDEF", Tokenizer::TYPE_INTEGER},
     217  {"01234567", Tokenizer::TYPE_INTEGER},
     [all …]

tokenizer.cc
     195  Tokenizer::Tokenizer(ZeroCopyInputStream* input,  in Tokenizer()
     219  Tokenizer::~Tokenizer() {  in ~Tokenizer()
     230  void Tokenizer::NextChar() {  in NextChar()
     251  void Tokenizer::Refresh() {  in Refresh()
     282  inline void Tokenizer::RecordTo(std::string* target) {  in RecordTo()
     287  inline void Tokenizer::StopRecording() {  in StopRecording()
     300  inline void Tokenizer::StartToken() {  in StartToken()
     308  inline void Tokenizer::EndToken() {  in EndToken()
     317  inline bool Tokenizer::LookingAt() {  in LookingAt()
     322  inline bool Tokenizer::TryConsumeOne() {  in TryConsumeOne()
     [all …]

tokenizer.h
      56  class Tokenizer;
      94  class PROTOBUF_EXPORT Tokenizer {
      99  Tokenizer(ZeroCopyInputStream* input, ErrorCollector* error_collector);
     100  ~Tokenizer();
     260  GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Tokenizer);
     397  inline const Tokenizer::Token& Tokenizer::current() { return current_; }  in current()
     399  inline const Tokenizer::Token& Tokenizer::previous() { return previous_; }  in previous()
     401  inline void Tokenizer::ParseString(const std::string& text,  in ParseString()

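The header above shows the whole surface needed to drive this tokenizer: construct it over a ZeroCopyInputStream with an ErrorCollector, then pull tokens. A minimal sketch, assuming the protobuf API of this era (the ErrorCollector override signature and the header paths are assumptions, not part of the matches above):

    #include <iostream>
    #include <string>

    #include <google/protobuf/io/tokenizer.h>
    #include <google/protobuf/io/zero_copy_stream_impl_lite.h>

    using namespace google::protobuf::io;

    // Hypothetical collector; the tokenizer reports problems through this interface.
    class StderrErrorCollector : public ErrorCollector {
     public:
      void AddError(int line, ColumnNumber column,
                    const std::string& message) override {
        std::cerr << line << ":" << column << ": " << message << "\n";
      }
    };

    int main() {
      const std::string text = "hello 123 0x89abcdef";  // cf. the unittest inputs
      ArrayInputStream input(text.data(), static_cast<int>(text.size()));
      StderrErrorCollector errors;
      Tokenizer tokenizer(&input, &errors);  // ctor shown at tokenizer.h:99

      while (tokenizer.Next())                         // Next() advances the stream
        std::cout << tokenizer.current().text << "\n"; // accessor at tokenizer.h:397
    }
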
/third_party/gn/src/gn/
tokenizer_unittest.cc
      25  std::vector<Token> results = Tokenizer::Tokenize(&input_file, &err);  in CheckTokenizer()
      40  TEST(Tokenizer, Empty) {  in TEST()
      45  std::vector<Token> results = Tokenizer::Tokenize(&empty_string_input, &err);  in TEST()
      51  results = Tokenizer::Tokenize(&whitespace_input, &err);  in TEST()
      55  TEST(Tokenizer, Identifier) {  in TEST()
      60  TEST(Tokenizer, Integer) {  in TEST()
      66  TEST(Tokenizer, IntegerNoSpace) {  in TEST()
      72  TEST(Tokenizer, String) {  in TEST()
      80  TEST(Tokenizer, Operator) {  in TEST()
     103  TEST(Tokenizer, Scoper) {  in TEST()
     [all …]

tokenizer.cc
      71  Tokenizer::Tokenizer(const InputFile* input_file,  in Tokenizer()
      79  Tokenizer::~Tokenizer() = default;
      82  std::vector<Token> Tokenizer::Tokenize(  in Tokenize()
      86  Tokenizer t(input_file, err, whitespace_transform);  in Tokenize()
      90  std::vector<Token> Tokenizer::Run() {  in Run()
     159  size_t Tokenizer::ByteOffsetOfNthLine(std::string_view buf, int n) {  in ByteOffsetOfNthLine()
     179  bool Tokenizer::IsNewline(std::string_view buffer, size_t offset) {  in IsNewline()
     186  bool Tokenizer::IsIdentifierFirstChar(char c) {  in IsIdentifierFirstChar()
     191  bool Tokenizer::IsIdentifierContinuingChar(char c) {  in IsIdentifierContinuingChar()
     196  void Tokenizer::AdvanceToNextToken() {  in AdvanceToNextToken()
     [all …]

tokenizer.h
      27  class Tokenizer {
      56  Tokenizer(const InputFile* input_file,
      59  ~Tokenizer();
     104  Tokenizer(const Tokenizer&) = delete;
     105  Tokenizer& operator=(const Tokenizer&) = delete;

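GN exposes tokenization as a single static call (tokenizer.cc:82), the same entry point the unit test uses at line 25. A minimal sketch, assuming the usual InputFile setup from the GN tree (the SourceFile path and the SetContents/has_error calls are assumptions, not part of the matches):

    #include <string>
    #include <vector>

    #include "gn/err.h"
    #include "gn/input_file.h"
    #include "gn/source_file.h"
    #include "gn/token.h"
    #include "gn/tokenizer.h"

    std::vector<Token> TokenizeSource(const std::string& contents) {
      InputFile input_file(SourceFile("//BUILD.gn"));  // hypothetical source path
      input_file.SetContents(contents);

      Err err;
      std::vector<Token> results = Tokenizer::Tokenize(&input_file, &err);
      if (err.has_error())
        results.clear();  // problems come back through the Err out-parameter
      return results;
    }
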
/third_party/vk-gl-cts/external/amber/src/src/
tokenizer_test.cc
      27  Tokenizer t("");  in TEST_F()
      34  Tokenizer t("TestIdentifier");  in TEST_F()
      46  Tokenizer t("123");  in TEST_F()
      58  Tokenizer t("-123");  in TEST_F()
      70  Tokenizer t("123.456");  in TEST_F()
      84  Tokenizer t(nan_str);  in TestNaN()
     109  Tokenizer t("-123.456");  in TEST_F()
     121  Tokenizer t(".123456");  in TEST_F()
     133  Tokenizer t("BufferAccess32");  in TEST_F()
     145  Tokenizer t("TestValue 123.456");  in TEST_F()
     [all …]

tokenizer.cc
      55  Tokenizer::Tokenizer(const std::string& data) : data_(data) {}  in Tokenizer()
      57  Tokenizer::~Tokenizer() = default;
      59  std::unique_ptr<Token> Tokenizer::NextToken() {  in NextToken()
     261  std::unique_ptr<Token> Tokenizer::PeekNextToken() {  in PeekNextToken()
     272  std::string Tokenizer::ExtractToNext(const std::string& str) {  in ExtractToNext()
     293  bool Tokenizer::IsWhitespace(char ch) {  in IsWhitespace()
     298  void Tokenizer::SkipWhitespace() {  in SkipWhitespace()
     305  void Tokenizer::SkipComment() {  in SkipComment()

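Amber's tokenizer is a pull lexer over an in-memory string: the constructor at tokenizer.cc:55 copies the input, NextToken() (line 59) hands back one heap-allocated token at a time, and PeekNextToken() gives lookahead. A minimal sketch; the include path and Token's IsEOS()/ToOriginalString() accessors are assumptions drawn from the Amber sources, not from the matches above:

    #include <iostream>
    #include <memory>
    #include <string>

    #include "src/tokenizer.h"  // in-tree path, assumed

    void DumpTokens(const std::string& script) {
      amber::Tokenizer t(script);  // the tokenizer owns a copy of the input
      for (std::unique_ptr<amber::Token> tok = t.NextToken();
           tok && !tok->IsEOS(); tok = t.NextToken()) {
        std::cout << tok->ToOriginalString() << "\n";
      }
    }
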
/third_party/vk-gl-cts/framework/xexml/
xeXMLParser.hpp
      83  class Tokenizer
      86  Tokenizer (void);
      87  ~Tokenizer (void);
     103  Tokenizer (const Tokenizer& other);
     104  Tokenizer& operator= (const Tokenizer& other);
     190  Tokenizer m_tokenizer;
     203  inline void Tokenizer::getTokenStr (std::string& dst) const  in getTokenStr()
     211  inline void Tokenizer::appendTokenStr (std::string& dst) const  in appendTokenStr()

xeXMLParser.cpp
      57  Tokenizer::Tokenizer (void)  in Tokenizer()
      65  Tokenizer::~Tokenizer (void)  in ~Tokenizer()
      69  void Tokenizer::clear (void)  in clear()
      77  void Tokenizer::error (const std::string& what)  in error()
      82  void Tokenizer::feed (const deUint8* bytes, int numBytes)  in feed()
      98  int Tokenizer::getChar (int offset) const  in getChar()
     108  void Tokenizer::advance (void)  in advance()
     338  void Tokenizer::getString (std::string& dst) const  in getString()

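Unlike the pull lexers above, this tokenizer is push-style: feed() (xeXMLParser.cpp:82) appends raw bytes and the caller then advances over whatever tokens are now complete. A rough sketch under the assumption that the dEQP parser exposes a getTokenType() accessor with TOKEN_INCOMPLETE/TOKEN_END_OF_STRING sentinels; those names do not appear in the matches above:

    #include <string>

    #include "xeXMLParser.hpp"

    void feedChunk (xe::xml::Tokenizer& tokenizer, const std::string& chunk)
    {
        tokenizer.feed(reinterpret_cast<const deUint8*>(chunk.data()),
                       static_cast<int>(chunk.size()));

        // Consume tokens until the tokenizer needs more input or hits the end.
        while (tokenizer.getTokenType() != xe::xml::TOKEN_INCOMPLETE &&
               tokenizer.getTokenType() != xe::xml::TOKEN_END_OF_STRING)
        {
            std::string tokenText;
            tokenizer.getTokenStr(tokenText);  // inline accessor at xeXMLParser.hpp:203
            // ...handle tokenText...
            tokenizer.advance();
        }
    }
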
/third_party/skia/third_party/externals/swiftshader/src/OpenGL/compiler/preprocessor/
Tokenizer.l
     315  Tokenizer::Tokenizer(Diagnostics *diagnostics) : mHandle(nullptr), mMaxTokenSize(1024)
     320  Tokenizer::~Tokenizer()
     325  bool Tokenizer::init(size_t count, const char * const string[], const int length[])
     334  void Tokenizer::setFileNumber(int file)
     341  void Tokenizer::setLineNumber(int line)
     346  void Tokenizer::setMaxTokenSize(size_t maxTokenSize)
     351  void Tokenizer::lex(Token *token)
     381  bool Tokenizer::initScanner()
     390  void Tokenizer::destroyScanner()

Tokenizer.h
      27  class Tokenizer : public Lexer
      44  Tokenizer(Diagnostics *diagnostics);
      45  ~Tokenizer() override;
      56  PP_DISALLOW_COPY_AND_ASSIGN(Tokenizer);

DirectiveParser.h
      28  class Tokenizer;
      33  DirectiveParser(Tokenizer *tokenizer,
      87  Tokenizer *mTokenizer;

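This Tokenizer is generated by flex from Tokenizer.l and implements the preprocessor's Lexer interface; the DirectiveParser holds one and pulls tokens from it. init() (Tokenizer.l:325) points the scanner at the shader strings, and each lex() call (line 351) fills in one Token. A minimal sketch; treating token type 0 as end-of-input follows the usual flex convention and is an assumption here:

    #include <cstddef>

    #include "Diagnostics.h"
    #include "Token.h"
    #include "Tokenizer.h"

    void LexAll(pp::Diagnostics *diagnostics, const char *source, size_t length)
    {
        pp::Tokenizer tokenizer(diagnostics);

        const char *const strings[] = {source};
        const int lengths[] = {static_cast<int>(length)};
        if (!tokenizer.init(1, strings, lengths))
            return;  // scanner setup failed

        pp::Token token;
        tokenizer.lex(&token);
        while (token.type != 0)  // 0 assumed to be the flex end-of-input sentinel
        {
            // ...hand the token to the DirectiveParser / macro expander...
            tokenizer.lex(&token);
        }
    }
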
/third_party/flutter/skia/third_party/externals/angle2/src/compiler/preprocessor/
Tokenizer.l
     309  Tokenizer::Tokenizer(Diagnostics *diagnostics) : mHandle(nullptr), mMaxTokenSize(256)
     314  Tokenizer::~Tokenizer()
     319  bool Tokenizer::init(size_t count, const char * const string[], const int length[])
     328  void Tokenizer::setFileNumber(int file)
     335  void Tokenizer::setLineNumber(int line)
     340  void Tokenizer::setMaxTokenSize(size_t maxTokenSize)
     345  void Tokenizer::lex(Token *token)
     375  bool Tokenizer::initScanner()
     384  void Tokenizer::destroyScanner()

64bit-tokenizer-safety.patch
       1  diff --git a/src/compiler/preprocessor/Tokenizer.cpp b/src/compiler/preprocessor/Tokenizer.cpp
       3  --- a/src/compiler/preprocessor/Tokenizer.cpp
       4  +++ b/src/compiler/preprocessor/Tokenizer.cpp

Tokenizer.h
      22  class Tokenizer : public Lexer
      39  Tokenizer(Diagnostics *diagnostics);
      40  ~Tokenizer() override;

DirectiveParser.h
      23  class Tokenizer;
      28  DirectiveParser(Tokenizer *tokenizer,
      76  Tokenizer *mTokenizer;

/third_party/skia/third_party/externals/angle2/src/compiler/preprocessor/
preprocessor.l
     317  Tokenizer::Tokenizer(Diagnostics *diagnostics) : mHandle(nullptr), mMaxTokenSize(256)
     322  Tokenizer::~Tokenizer()
     327  bool Tokenizer::init(size_t count, const char * const string[], const int length[])
     336  void Tokenizer::setFileNumber(int file)
     343  void Tokenizer::setLineNumber(int line)
     348  void Tokenizer::setMaxTokenSize(size_t maxTokenSize)
     353  void Tokenizer::lex(Token *token)
     383  bool Tokenizer::initScanner()
     392  void Tokenizer::destroyScanner()

Tokenizer.h
      22  class Tokenizer : public Lexer
      39  Tokenizer(Diagnostics *diagnostics);
      40  ~Tokenizer() override;

DirectiveParser.h
      23  class Tokenizer;
      28  DirectiveParser(Tokenizer *tokenizer,
      76  Tokenizer *mTokenizer;

/third_party/protobuf/src/google/protobuf/compiler/
parser.cc
     200  inline bool Parser::LookingAtType(io::Tokenizer::TokenType token_type) {  in LookingAtType()
     204  inline bool Parser::AtEnd() { return LookingAtType(io::Tokenizer::TYPE_END); }  in AtEnd()
     234  if (LookingAtType(io::Tokenizer::TYPE_IDENTIFIER)) {  in ConsumeIdentifier()
     245  if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) {  in ConsumeInteger()
     247  if (!io::Tokenizer::ParseInteger(input_->current().text, kint32max,  in ConsumeInteger()
     277  if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) {  in ConsumeInteger64()
     278  if (!io::Tokenizer::ParseInteger(input_->current().text, max_value,  in ConsumeInteger64()
     293  if (LookingAtType(io::Tokenizer::TYPE_FLOAT)) {  in ConsumeNumber()
     294  *output = io::Tokenizer::ParseFloat(input_->current().text);  in ConsumeNumber()
     297  } else if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) {  in ConsumeNumber()
     [all …]

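parser.cc layers a recursive-descent .proto parser over io::Tokenizer: each Consume* helper checks the current token's type, converts its text with a static parse helper such as Tokenizer::ParseInteger (line 247), and only then advances. A minimal free-standing sketch of that pattern (the real versions are Parser member functions that also report errors through an error collector):

    #include <cstdint>

    #include <google/protobuf/io/tokenizer.h>

    using google::protobuf::io::Tokenizer;

    bool ConsumeInteger(Tokenizer* input, uint64_t max_value, uint64_t* output) {
      if (input->current().type != Tokenizer::TYPE_INTEGER)
        return false;  // wrong token type; leave the stream untouched
      if (!Tokenizer::ParseInteger(input->current().text, max_value, output))
        return false;  // lexically an integer, but out of range for max_value
      input->Next();   // consume the token we just parsed
      return true;
    }
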
/third_party/node/tools/gyp/tools/Xcode/Specifications/
gyp.xclangspec
      73  Tokenizer = "xcode.lang.gyp.lexer.toplevel";
     108  Tokenizer = "xcode.lang.gyp.lexer";
     121  Tokenizer = "xcode.lang.gyp.lexer";
     133  Tokenizer = "xcode.lang.gyp.lexer";
     144  Tokenizer = "xcode.lang.gyp.lexer";
     155  Tokenizer = "xcode.lang.gyp.lexer";
     168  Tokenizer = "xcode.lang.gyp.lexer";
     183  Tokenizer = "xcode.lang.gyp.lexer";

/third_party/node/deps/npm/node_modules/node-gyp/gyp/tools/Xcode/Specifications/
gyp.xclangspec
      73  Tokenizer = "xcode.lang.gyp.lexer.toplevel";
     108  Tokenizer = "xcode.lang.gyp.lexer";
     121  Tokenizer = "xcode.lang.gyp.lexer";
     133  Tokenizer = "xcode.lang.gyp.lexer";
     144  Tokenizer = "xcode.lang.gyp.lexer";
     155  Tokenizer = "xcode.lang.gyp.lexer";
     168  Tokenizer = "xcode.lang.gyp.lexer";
     183  Tokenizer = "xcode.lang.gyp.lexer";

/third_party/protobuf/src/google/protobuf/
text_format.cc
     269  tokenizer_.set_comment_style(io::Tokenizer::SH_COMMENT_STYLE);  in ParserImpl()
     288  if (LookingAtType(io::Tokenizer::TYPE_END)) {  in Parse()
     303  return suc && LookingAtType(io::Tokenizer::TYPE_END);  in ParseField()
     552  LookingAtType(io::Tokenizer::TYPE_STRING)) {  in ConsumeField()
     757  if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) {  in ConsumeFieldValue()
     783  if (LookingAtType(io::Tokenizer::TYPE_IDENTIFIER)) {  in ConsumeFieldValue()
     789  LookingAtType(io::Tokenizer::TYPE_INTEGER)) {  in ConsumeFieldValue()
     835  if (LookingAtType(io::Tokenizer::TYPE_STRING)) {  in SkipFieldValue()
     836  while (LookingAtType(io::Tokenizer::TYPE_STRING)) {  in SkipFieldValue()
     877  if (!LookingAtType(io::Tokenizer::TYPE_INTEGER) &&  in SkipFieldValue()
     [all …]

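The one tokenizer-level configuration visible here is at text_format.cc:269: text format uses shell-style '#' comments, so ParserImpl switches the tokenizer away from the default C++ comment style before parsing begins. A minimal sketch of that setup, reusing the stream and collector types from the earlier protobuf example:

    #include <google/protobuf/io/tokenizer.h>

    using google::protobuf::io::ErrorCollector;
    using google::protobuf::io::Tokenizer;
    using google::protobuf::io::ZeroCopyInputStream;

    void TokenizeTextFormat(ZeroCopyInputStream* input, ErrorCollector* errors) {
      Tokenizer tokenizer(input, errors);
      // Default is CPP_COMMENT_STYLE ("//" and "/* */"); text format wants "#".
      tokenizer.set_comment_style(Tokenizer::SH_COMMENT_STYLE);

      while (tokenizer.Next()) {
        // ...dispatch on tokenizer.current().type, as the LookingAtType()
        // checks above do...
      }
    }
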
/third_party/python/Tools/peg_generator/pegen/
build.py
      16  from pegen.tokenizer import Tokenizer
     107  ) -> Tuple[Grammar, Parser, Tokenizer]:
     109  tokenizer = Tokenizer(tokenize.generate_tokens(file.readline), verbose=verbose_tokenizer)
     196  ) -> Tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
     236  ) -> Tuple[Grammar, Parser, Tokenizer, ParserGenerator]: