Searched for refs:Tokenizer (results 1 – 25 of 105, sorted by relevance).

/external/protobuf/src/google/protobuf/io/
tokenizer_unittest.cc
182 EXPECT_TRUE(Tokenizer::ParseInteger(text, kuint64max, &result)); in ParseInteger()
197 Tokenizer::TokenType type;
207 { "hello", Tokenizer::TYPE_IDENTIFIER },
210 { "123", Tokenizer::TYPE_INTEGER },
211 { "0xab6", Tokenizer::TYPE_INTEGER },
212 { "0XAB6", Tokenizer::TYPE_INTEGER },
213 { "0X1234567", Tokenizer::TYPE_INTEGER },
214 { "0x89abcdef", Tokenizer::TYPE_INTEGER },
215 { "0x89ABCDEF", Tokenizer::TYPE_INTEGER },
216 { "01234567", Tokenizer::TYPE_INTEGER },
[all …]
tokenizer.cc
180 Tokenizer::Tokenizer(ZeroCopyInputStream* input, in Tokenizer() function in google::protobuf::io::Tokenizer
201 Tokenizer::~Tokenizer() { in ~Tokenizer()
212 void Tokenizer::NextChar() { in NextChar()
233 void Tokenizer::Refresh() { in Refresh()
263 inline void Tokenizer::StartToken() { in StartToken()
271 inline void Tokenizer::EndToken() { in EndToken()
286 inline bool Tokenizer::LookingAt() { in LookingAt()
291 inline bool Tokenizer::TryConsumeOne() { in TryConsumeOne()
300 inline bool Tokenizer::TryConsume(char c) { in TryConsume()
310 inline void Tokenizer::ConsumeZeroOrMore() { in ConsumeZeroOrMore()
[all …]
tokenizer.h
51 class Tokenizer; variable
81 class LIBPROTOBUF_EXPORT Tokenizer {
86 Tokenizer(ZeroCopyInputStream* input, ErrorCollector* error_collector);
87 ~Tokenizer();
180 GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Tokenizer);
290 inline const Tokenizer::Token& Tokenizer::current() { in current()
294 inline void Tokenizer::ParseString(const string& text, string* output) { in ParseString()
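To make the io::Tokenizer hits above concrete, here is a minimal sketch of driving the tokenizer over an in-memory buffer. It assumes protobuf's public headers; StderrErrorCollector is a hypothetical subclass added only because io::ErrorCollector is abstract.

    #include <iostream>
    #include <string>
    #include <google/protobuf/io/tokenizer.h>
    #include <google/protobuf/io/zero_copy_stream_impl_lite.h>

    using namespace google::protobuf;

    // Hypothetical collector: io::ErrorCollector is abstract, so some subclass is needed.
    class StderrErrorCollector : public io::ErrorCollector {
     public:
      void AddError(int line, int column, const std::string& message) override {
        std::cerr << "lex error at " << line << ":" << column << ": " << message << "\n";
      }
    };

    int main() {
      const std::string text = "hello 123 0xab6 \"world\"";
      io::ArrayInputStream raw(text.data(), static_cast<int>(text.size()));  // any ZeroCopyInputStream works
      StderrErrorCollector errors;
      io::Tokenizer tokenizer(&raw, &errors);

      // Walk the token stream; current().type is one of the TYPE_* values shown above.
      while (tokenizer.Next()) {
        const io::Tokenizer::Token& t = tokenizer.current();
        std::cout << t.text << " (type " << t.type << ")\n";
      }
      return 0;
    }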
/external/chromium_org/third_party/protobuf/src/google/protobuf/io/
tokenizer_unittest.cc
183 EXPECT_TRUE(Tokenizer::ParseInteger(text, kuint64max, &result)); in ParseInteger()
198 Tokenizer::TokenType type;
208 { "hello", Tokenizer::TYPE_IDENTIFIER },
211 { "123", Tokenizer::TYPE_INTEGER },
212 { "0xab6", Tokenizer::TYPE_INTEGER },
213 { "0XAB6", Tokenizer::TYPE_INTEGER },
214 { "0X1234567", Tokenizer::TYPE_INTEGER },
215 { "0x89abcdef", Tokenizer::TYPE_INTEGER },
216 { "0x89ABCDEF", Tokenizer::TYPE_INTEGER },
217 { "01234567", Tokenizer::TYPE_INTEGER },
[all …]
tokenizer.h
52 class Tokenizer; variable
82 class LIBPROTOBUF_EXPORT Tokenizer {
87 Tokenizer(ZeroCopyInputStream* input, ErrorCollector* error_collector);
88 ~Tokenizer();
233 GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Tokenizer);
367 inline const Tokenizer::Token& Tokenizer::current() { in current()
371 inline const Tokenizer::Token& Tokenizer::previous() { in previous()
375 inline void Tokenizer::ParseString(const string& text, string* output) { in ParseString()
tokenizer.cc
185 Tokenizer::Tokenizer(ZeroCopyInputStream* input, in Tokenizer() function in google::protobuf::io::Tokenizer
208 Tokenizer::~Tokenizer() { in ~Tokenizer()
219 void Tokenizer::NextChar() { in NextChar()
240 void Tokenizer::Refresh() { in Refresh()
270 inline void Tokenizer::RecordTo(string* target) { in RecordTo()
275 inline void Tokenizer::StopRecording() { in StopRecording()
287 inline void Tokenizer::StartToken() { in StartToken()
295 inline void Tokenizer::EndToken() { in EndToken()
304 inline bool Tokenizer::LookingAt() { in LookingAt()
309 inline bool Tokenizer::TryConsumeOne() { in TryConsumeOne()
[all …]
/external/chromium_org/tools/gn/
tokenizer_unittest.cc
23 std::vector<Token> results = Tokenizer::Tokenize(&input_file, &err); in CheckTokenizer()
38 TEST(Tokenizer, Empty) { in TEST() argument
43 std::vector<Token> results = Tokenizer::Tokenize(&empty_string_input, &err); in TEST()
49 results = Tokenizer::Tokenize(&whitespace_input, &err); in TEST()
53 TEST(Tokenizer, Identifier) { in TEST() argument
60 TEST(Tokenizer, Integer) { in TEST() argument
68 TEST(Tokenizer, IntegerNoSpace) { in TEST() argument
76 TEST(Tokenizer, String) { in TEST() argument
86 TEST(Tokenizer, Operator) { in TEST() argument
107 TEST(Tokenizer, Scoper) { in TEST() argument
[all …]
tokenizer.cc
72 Tokenizer::Tokenizer(const InputFile* input_file, Err* err) in Tokenizer() function in Tokenizer
81 Tokenizer::~Tokenizer() { in ~Tokenizer()
85 std::vector<Token> Tokenizer::Tokenize(const InputFile* input_file, Err* err) { in Tokenize()
86 Tokenizer t(input_file, err); in Tokenize()
90 std::vector<Token> Tokenizer::Run() { in Run()
138 size_t Tokenizer::ByteOffsetOfNthLine(const base::StringPiece& buf, int n) { in ByteOffsetOfNthLine()
159 bool Tokenizer::IsNewline(const base::StringPiece& buffer, size_t offset) { in IsNewline()
166 void Tokenizer::AdvanceToNextToken() { in AdvanceToNextToken()
171 Token::Type Tokenizer::ClassifyCurrent() const { in ClassifyCurrent()
220 void Tokenizer::AdvanceToEndOfToken(const Location& location, in AdvanceToEndOfToken()
[all …]
tokenizer.h
17 class Tokenizer {
45 explicit Tokenizer(const InputFile* input_file, Err* err);
46 ~Tokenizer();
85 DISALLOW_COPY_AND_ASSIGN(Tokenizer);
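The GN results follow the pattern visible in tokenizer_unittest.cc:23 above: build an InputFile, then call the static Tokenizer::Tokenize. A rough sketch under those assumptions (GN's InputFile, SourceFile, Err, and Token types; the //BUILD.gn path and contents are made up):

    #include <vector>
    #include "tools/gn/err.h"
    #include "tools/gn/input_file.h"
    #include "tools/gn/source_file.h"
    #include "tools/gn/token.h"
    #include "tools/gn/tokenizer.h"

    std::vector<Token> TokenizeSnippet() {
      InputFile input_file(SourceFile("//BUILD.gn"));  // made-up source path
      input_file.SetContents("executable(\"hello\") { sources = [ \"hello.cc\" ] }");

      Err err;
      std::vector<Token> results = Tokenizer::Tokenize(&input_file, &err);
      if (err.has_error())
        results.clear();  // on error the token list is not meaningful
      return results;
    }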
/external/clang/lib/ASTMatchers/Dynamic/
Parser.cpp
236 const TokenInfo NameToken = Tokenizer->consumeNextToken(); in parseMatcherExpressionImpl()
238 const TokenInfo OpenToken = Tokenizer->consumeNextToken(); in parseMatcherExpressionImpl()
247 while (Tokenizer->nextTokenKind() != TokenInfo::TK_Eof) { in parseMatcherExpressionImpl()
248 if (Tokenizer->nextTokenKind() == TokenInfo::TK_CloseParen) { in parseMatcherExpressionImpl()
250 EndToken = Tokenizer->consumeNextToken(); in parseMatcherExpressionImpl()
255 const TokenInfo CommaToken = Tokenizer->consumeNextToken(); in parseMatcherExpressionImpl()
266 ArgValue.Text = Tokenizer->peekNextToken().Text; in parseMatcherExpressionImpl()
267 ArgValue.Range = Tokenizer->peekNextToken().Range; in parseMatcherExpressionImpl()
279 if (Tokenizer->peekNextToken().Kind == TokenInfo::TK_Period) { in parseMatcherExpressionImpl()
281 Tokenizer->consumeNextToken(); // consume the period. in parseMatcherExpressionImpl()
[all …]
/external/protobuf/src/google/protobuf/compiler/
parser.cc
114 inline bool Parser::LookingAtType(io::Tokenizer::TokenType token_type) { in LookingAtType()
119 return LookingAtType(io::Tokenizer::TYPE_END); in AtEnd()
150 if (LookingAtType(io::Tokenizer::TYPE_IDENTIFIER)) { in ConsumeIdentifier()
161 if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) { in ConsumeInteger()
163 if (!io::Tokenizer::ParseInteger(input_->current().text, in ConsumeInteger()
179 if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) { in ConsumeInteger64()
180 if (!io::Tokenizer::ParseInteger(input_->current().text, max_value, in ConsumeInteger64()
195 if (LookingAtType(io::Tokenizer::TYPE_FLOAT)) { in ConsumeNumber()
196 *output = io::Tokenizer::ParseFloat(input_->current().text); in ConsumeNumber()
199 } else if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) { in ConsumeNumber()
[all …]
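ParseInteger and ParseFloat, called from the ConsumeInteger/ConsumeNumber hits above, are static helpers on io::Tokenizer that convert the text of an already-lexed token. A small sketch (kuint64max comes from protobuf's stubs/common.h):

    #include <google/protobuf/io/tokenizer.h>
    #include <google/protobuf/stubs/common.h>  // uint64, kuint64max

    using google::protobuf::io::Tokenizer;

    void ParseTokenText() {
      google::protobuf::uint64 value = 0;
      // Handles decimal, hex (0x...), and octal (0...) forms, as exercised by the
      // unittest cases listed earlier; returns false on overflow past the given max.
      bool ok = Tokenizer::ParseInteger("0x89abcdef", google::protobuf::kuint64max, &value);

      // Assumes the text already matched the tokenizer's FLOAT pattern.
      double d = Tokenizer::ParseFloat("1.5e6");
      (void)ok; (void)d;
    }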
/external/chromium_org/third_party/angle/src/compiler/preprocessor/
Tokenizer.h
19 class Tokenizer : public Lexer
36 Tokenizer(Diagnostics* diagnostics);
37 ~Tokenizer();
48 PP_DISALLOW_COPY_AND_ASSIGN(Tokenizer);
Tokenizer.l
270 Tokenizer::Tokenizer(Diagnostics* diagnostics)
277 Tokenizer::~Tokenizer()
282 bool Tokenizer::init(size_t count, const char* const string[], const int length[])
290 void Tokenizer::setFileNumber(int file)
297 void Tokenizer::setLineNumber(int line)
302 void Tokenizer::lex(Token* token)
321 bool Tokenizer::initScanner()
330 void Tokenizer::destroyScanner()
DirectiveParser.h
20 class Tokenizer; variable
25 DirectiveParser(Tokenizer* tokenizer,
74 Tokenizer* mTokenizer;
generate_parser.sh
25 run_flex Tokenizer.l Tokenizer.cpp
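The ANGLE preprocessor Tokenizer above is generated from Tokenizer.l by generate_parser.sh; the header shows its small driving surface (init, setFileNumber/setLineNumber, lex). A sketch of that flow, assuming a caller-supplied concrete pp::Diagnostics implementation (the class is abstract) and ANGLE's in-tree include paths:

    #include <cstring>
    #include "compiler/preprocessor/Token.h"
    #include "compiler/preprocessor/Tokenizer.h"

    // 'diag' must point at a concrete pp::Diagnostics subclass provided by the caller.
    void LexFirstToken(pp::Diagnostics* diag, const char* source)
    {
        pp::Tokenizer tokenizer(diag);

        const char* strings[] = { source };
        const int lengths[] = { static_cast<int>(std::strlen(source)) };
        if (!tokenizer.init(1, strings, lengths))
            return;

        tokenizer.setFileNumber(0);
        tokenizer.setLineNumber(1);

        pp::Token token;
        tokenizer.lex(&token);  // callers normally loop until the end-of-input token
    }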
/external/chromium_org/third_party/protobuf/src/google/protobuf/compiler/
parser.cc
114 inline bool Parser::LookingAtType(io::Tokenizer::TokenType token_type) { in LookingAtType()
119 return LookingAtType(io::Tokenizer::TYPE_END); in AtEnd()
150 if (LookingAtType(io::Tokenizer::TYPE_IDENTIFIER)) { in ConsumeIdentifier()
161 if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) { in ConsumeInteger()
163 if (!io::Tokenizer::ParseInteger(input_->current().text, in ConsumeInteger()
193 if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) { in ConsumeInteger64()
194 if (!io::Tokenizer::ParseInteger(input_->current().text, max_value, in ConsumeInteger64()
209 if (LookingAtType(io::Tokenizer::TYPE_FLOAT)) { in ConsumeNumber()
210 *output = io::Tokenizer::ParseFloat(input_->current().text); in ConsumeNumber()
213 } else if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) { in ConsumeNumber()
[all …]
parser.h
73 bool Parse(io::Tokenizer* input, FileDescriptorProto* file);
150 inline bool LookingAtType(io::Tokenizer::TokenType token_type);
234 void StartAt(const io::Tokenizer::Token& token);
239 void EndAt(const io::Tokenizer::Token& token);
420 io::Tokenizer* input_;
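parser.h:73 above is the entry point for turning tokenized .proto text into a FileDescriptorProto. A minimal sketch, assuming protobuf's public headers (SilentErrorCollector is a hypothetical stand-in because io::Tokenizer requires an io::ErrorCollector):

    #include <string>
    #include <google/protobuf/compiler/parser.h>
    #include <google/protobuf/descriptor.pb.h>
    #include <google/protobuf/io/tokenizer.h>
    #include <google/protobuf/io/zero_copy_stream_impl_lite.h>

    using namespace google::protobuf;

    // Hypothetical collector that swallows errors; a real caller would record them.
    class SilentErrorCollector : public io::ErrorCollector {
     public:
      void AddError(int line, int column, const std::string& message) override {}
    };

    bool ParseProtoText(const std::string& text, FileDescriptorProto* file) {
      io::ArrayInputStream raw(text.data(), static_cast<int>(text.size()));
      SilentErrorCollector errors;
      io::Tokenizer tokenizer(&raw, &errors);

      compiler::Parser parser;
      return parser.Parse(&tokenizer, file);  // fills *file on success
    }

On success, the filled FileDescriptorProto can then be handed to a DescriptorPool (BuildFile) to obtain full descriptors.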
/external/smack/src/org/xbill/DNS/
Master.java
23 private Tokenizer st;
38 st = new Tokenizer(file); in Master()
87 st = new Tokenizer(in); in Master()
257 catch (Tokenizer.TokenizerException e) { in nextGenerated()
274 Tokenizer.Token token; in _nextRecord()
293 if (token.type == Tokenizer.WHITESPACE) { in _nextRecord()
294 Tokenizer.Token next = st.get(); in _nextRecord()
295 if (next.type == Tokenizer.EOL) in _nextRecord()
297 else if (next.type == Tokenizer.EOF) in _nextRecord()
305 else if (token.type == Tokenizer.EOL) in _nextRecord()
[all …]
Update.java
96 present(Name name, int type, Tokenizer tokenizer) throws IOException { in present()
145 add(Name name, int type, long ttl, Tokenizer tokenizer) throws IOException { in add()
212 delete(Name name, int type, Tokenizer tokenizer) throws IOException { in delete()
263 replace(Name name, int type, long ttl, Tokenizer tokenizer) throws IOException in replace()
/external/chromium_org/tools/gyp/tools/Xcode/Specifications/
gyp.xclangspec
73 Tokenizer = "xcode.lang.gyp.lexer.toplevel";
108 Tokenizer = "xcode.lang.gyp.lexer";
121 Tokenizer = "xcode.lang.gyp.lexer";
133 Tokenizer = "xcode.lang.gyp.lexer";
144 Tokenizer = "xcode.lang.gyp.lexer";
155 Tokenizer = "xcode.lang.gyp.lexer";
168 Tokenizer = "xcode.lang.gyp.lexer";
183 Tokenizer = "xcode.lang.gyp.lexer";
/external/chromium_org/third_party/protobuf/src/google/protobuf/
text_format.cc
209 tokenizer_.set_comment_style(io::Tokenizer::SH_COMMENT_STYLE); in ParserImpl()
223 if (LookingAtType(io::Tokenizer::TYPE_END)) { in Parse()
238 return suc && LookingAtType(io::Tokenizer::TYPE_END); in ParseField()
591 if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) { in ConsumeFieldValue()
616 if (LookingAtType(io::Tokenizer::TYPE_IDENTIFIER)) { in ConsumeFieldValue()
622 LookingAtType(io::Tokenizer::TYPE_INTEGER)) { in ConsumeFieldValue()
654 if (LookingAtType(io::Tokenizer::TYPE_STRING)) { in SkipFieldValue()
655 while (LookingAtType(io::Tokenizer::TYPE_STRING)) { in SkipFieldValue()
682 if (!LookingAtType(io::Tokenizer::TYPE_INTEGER) && in SkipFieldValue()
683 !LookingAtType(io::Tokenizer::TYPE_FLOAT) && in SkipFieldValue()
[all …]
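These ParserImpl hits are the internals behind TextFormat's public parse entry points; the SH_COMMENT_STYLE setting at line 209 is why '#' starts a comment in text-format input. A typical call through the public API, with MyMessage standing in for some generated message type (hypothetical):

    #include <google/protobuf/text_format.h>
    #include "my_message.pb.h"  // hypothetical generated header

    bool ParseExample() {
      MyMessage msg;  // hypothetical generated message with an int32 field named 'foo'
      return google::protobuf::TextFormat::ParseFromString(
          "foo: 123  # '#' comments are accepted because of SH_COMMENT_STYLE", &msg);
    }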
/external/chromium_org/third_party/WebKit/Source/core/html/parser/
InputStreamPreprocessor.h
39 template <typename Tokenizer>
43 InputStreamPreprocessor(Tokenizer* tokenizer) in InputStreamPreprocessor()
128 Tokenizer* m_tokenizer;
/external/doclava/src/com/google/doclava/apicheck/
ApiFile.java
67 final Tokenizer tokenizer = new Tokenizer(filename, (new String(buf, 0, size)).toCharArray()); in parseApi()
88 private static void parsePackage(ApiInfo api, Tokenizer tokenizer) in parsePackage()
113 private static void parseClass(ApiInfo api, PackageInfo pkg, Tokenizer tokenizer, String token) in parseClass()
222 private static void parseConstructor(Tokenizer tokenizer, ClassInfo cl, String token) in parseConstructor()
273 private static void parseMethod(Tokenizer tokenizer, ClassInfo cl, String token) in parseMethod()
346 private static void parseField(Tokenizer tokenizer, ClassInfo cl, String token, boolean isEnum) in parseField()
471 private static void parseParameterList(Tokenizer tokenizer, AbstractMethodInfo method, in parseParameterList()
501 private static String parseThrows(Tokenizer tokenizer, AbstractMethodInfo method) in parseThrows()
533 public static void assertIdent(Tokenizer tokenizer, String token) throws ApiParseException { in assertIdent()
539 static class Tokenizer { class in ApiFile
[all …]
/external/protobuf/src/google/protobuf/
text_format.cc
133 tokenizer_.set_comment_style(io::Tokenizer::SH_COMMENT_STYLE); in ParserImpl()
147 if (LookingAtType(io::Tokenizer::TYPE_END)) { in Parse()
162 return suc && LookingAtType(io::Tokenizer::TYPE_END); in ParseField()
451 bool LookingAtType(io::Tokenizer::TokenType token_type) { in LookingAtType()
458 if (!LookingAtType(io::Tokenizer::TYPE_IDENTIFIER)) { in ConsumeIdentifier()
472 if (!LookingAtType(io::Tokenizer::TYPE_STRING)) { in ConsumeString()
478 while (LookingAtType(io::Tokenizer::TYPE_STRING)) { in ConsumeString()
479 io::Tokenizer::ParseStringAppend(tokenizer_.current().text, text); in ConsumeString()
490 if (!LookingAtType(io::Tokenizer::TYPE_INTEGER)) { in ConsumeUnsignedInteger()
495 if (!io::Tokenizer::ParseInteger(tokenizer_.current().text, in ConsumeUnsignedInteger()
[all …]
/external/clang/include/clang/ASTMatchers/Dynamic/
Parser.h
135 Parser(CodeTokenizer *Tokenizer, Sema *S,
141 CodeTokenizer *const Tokenizer; variable
