
Searched refs:Tokenizer (Results 1 – 25 of 79) sorted by relevance


/external/protobuf/src/google/protobuf/io/
tokenizer_unittest.cc
184 EXPECT_TRUE(Tokenizer::ParseInteger(text, kuint64max, &result)); in ParseInteger()
199 Tokenizer::TokenType type;
209 { "hello", Tokenizer::TYPE_IDENTIFIER },
212 { "123", Tokenizer::TYPE_INTEGER },
213 { "0xab6", Tokenizer::TYPE_INTEGER },
214 { "0XAB6", Tokenizer::TYPE_INTEGER },
215 { "0X1234567", Tokenizer::TYPE_INTEGER },
216 { "0x89abcdef", Tokenizer::TYPE_INTEGER },
217 { "0x89ABCDEF", Tokenizer::TYPE_INTEGER },
218 { "01234567", Tokenizer::TYPE_INTEGER },
[all …]
tokenizer.cc
187 Tokenizer::Tokenizer(ZeroCopyInputStream* input, in Tokenizer() function in google::protobuf::io::Tokenizer
212 Tokenizer::~Tokenizer() { in ~Tokenizer()
223 void Tokenizer::NextChar() { in NextChar()
244 void Tokenizer::Refresh() { in Refresh()
274 inline void Tokenizer::RecordTo(string* target) { in RecordTo()
279 inline void Tokenizer::StopRecording() { in StopRecording()
291 inline void Tokenizer::StartToken() { in StartToken()
299 inline void Tokenizer::EndToken() { in EndToken()
308 inline bool Tokenizer::LookingAt() { in LookingAt()
313 inline bool Tokenizer::TryConsumeOne() { in TryConsumeOne()
[all …]
tokenizer.h
53 class Tokenizer; variable
91 class LIBPROTOBUF_EXPORT Tokenizer {
96 Tokenizer(ZeroCopyInputStream* input, ErrorCollector* error_collector);
97 ~Tokenizer();
257 GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Tokenizer);
394 inline const Tokenizer::Token& Tokenizer::current() { in current()
398 inline const Tokenizer::Token& Tokenizer::previous() { in previous()
402 inline void Tokenizer::ParseString(const string& text, string* output) { in ParseString()
/external/swiftshader/third_party/llvm-7.0/llvm/tools/llvm-rc/
ResourceScriptToken.cpp
86 class Tokenizer { class
88 Tokenizer(StringRef Input) : Data(Input), DataLength(Input.size()) {} in Tokenizer() function in __anon3afa1a770111::Tokenizer
148 void Tokenizer::skipCurrentLine() { in skipCurrentLine()
156 Expected<std::vector<RCToken>> Tokenizer::run() { in run()
199 bool Tokenizer::advance(size_t Amount) { in advance()
204 bool Tokenizer::skipWhitespaces() { in skipWhitespaces()
210 Error Tokenizer::consumeToken(const Kind TokenKind) { in consumeToken()
276 bool Tokenizer::willNowRead(StringRef FollowingChars) const { in willNowRead()
280 bool Tokenizer::canStartIdentifier() const { in canStartIdentifier()
287 bool Tokenizer::canContinueIdentifier() const { in canContinueIdentifier()
[all …]
/external/parameter-framework/upstream/test/tokenizer/
Test.cpp
49 Tokenizer tokenizer("a bcd ef");
58 Tokenizer tokenizer("");
67 Tokenizer tokenizer(" a \n\t bc ");
77 Tokenizer tokenizer("/a/bcd/ef g/h/", "/");
88 Tokenizer tokenizer("", Tokenizer::defaultDelimiters, false);
97 Tokenizer tokenizer(",", ",", false);
106 Tokenizer tokenizer(" a b \nc d ", Tokenizer::defaultDelimiters, false);
/external/clang/lib/ASTMatchers/Dynamic/
Parser.cpp
294 const TokenInfo NameToken = Tokenizer->consumeNextToken(); in parseIdentifierPrefixImpl()
296 if (Tokenizer->nextTokenKind() != TokenInfo::TK_OpenParen) { in parseIdentifierPrefixImpl()
306 if ((Tokenizer->nextTokenKind() == TokenInfo::TK_Comma || in parseIdentifierPrefixImpl()
307 Tokenizer->nextTokenKind() == TokenInfo::TK_CloseParen || in parseIdentifierPrefixImpl()
308 Tokenizer->nextTokenKind() == TokenInfo::TK_Eof) && in parseIdentifierPrefixImpl()
328 const TokenInfo OpenToken = Tokenizer->consumeNextToken(); in parseMatcherExpressionImpl()
349 while (Tokenizer->nextTokenKind() != TokenInfo::TK_Eof) { in parseMatcherExpressionImpl()
350 if (Tokenizer->nextTokenKind() == TokenInfo::TK_CloseParen) { in parseMatcherExpressionImpl()
352 EndToken = Tokenizer->consumeNextToken(); in parseMatcherExpressionImpl()
357 const TokenInfo CommaToken = Tokenizer->consumeNextToken(); in parseMatcherExpressionImpl()
[all …]
/external/deqp/executor/
xeXMLParser.hpp
83 class Tokenizer class
86 Tokenizer (void);
87 ~Tokenizer (void);
103 Tokenizer (const Tokenizer& other);
104 Tokenizer& operator= (const Tokenizer& other);
190 Tokenizer m_tokenizer;
203 inline void Tokenizer::getTokenStr (std::string& dst) const in getTokenStr()
211 inline void Tokenizer::appendTokenStr (std::string& dst) const in appendTokenStr()
xeXMLParser.cpp
57 Tokenizer::Tokenizer (void) in Tokenizer() function in xe::xml::Tokenizer
65 Tokenizer::~Tokenizer (void) in ~Tokenizer()
69 void Tokenizer::clear (void) in clear()
77 void Tokenizer::error (const std::string& what) in error()
82 void Tokenizer::feed (const deUint8* bytes, int numBytes) in feed()
98 int Tokenizer::getChar (int offset) const in getChar()
108 void Tokenizer::advance (void) in advance()
338 void Tokenizer::getString (std::string& dst) const in getString()
/external/libtextclassifier/utils/
tokenizer.cc
27 Tokenizer::Tokenizer( in Tokenizer() function in libtextclassifier3::Tokenizer
52 const TokenizationCodepointRangeT* Tokenizer::FindTokenizationRange( in FindTokenizationRange()
77 void Tokenizer::GetScriptAndRole(char32 codepoint, in GetScriptAndRole()
90 std::vector<Token> Tokenizer::Tokenize(const std::string& text) const { in Tokenize()
95 std::vector<Token> Tokenizer::Tokenize(const UnicodeText& text_unicode) const { in Tokenize()
117 std::vector<Token> Tokenizer::InternalTokenize( in InternalTokenize()
160 void Tokenizer::TokenizeSubstring(const UnicodeText& unicode_text, in TokenizeSubstring()
185 void Tokenizer::InternalRetokenize(const UnicodeText& unicode_text, in InternalRetokenize()
217 bool Tokenizer::ICUTokenize(const UnicodeText& context_unicode, in ICUTokenize()
tokenizer.h
37 class Tokenizer {
49 Tokenizer(
57 Tokenizer( in Tokenizer() function
60 : Tokenizer(TokenizationType_INTERNAL_TOKENIZER, /*unilib=*/nullptr, in Tokenizer()
/external/swiftshader/src/OpenGL/compiler/preprocessor/
Tokenizer.l
315 Tokenizer::Tokenizer(Diagnostics *diagnostics) : mHandle(nullptr), mMaxTokenSize(1024)
320 Tokenizer::~Tokenizer()
325 bool Tokenizer::init(size_t count, const char * const string[], const int length[])
334 void Tokenizer::setFileNumber(int file)
341 void Tokenizer::setLineNumber(int line)
346 void Tokenizer::setMaxTokenSize(size_t maxTokenSize)
351 void Tokenizer::lex(Token *token)
381 bool Tokenizer::initScanner()
390 void Tokenizer::destroyScanner()
Tokenizer.h
27 class Tokenizer : public Lexer
44 Tokenizer(Diagnostics *diagnostics);
45 ~Tokenizer() override;
56 PP_DISALLOW_COPY_AND_ASSIGN(Tokenizer);
DirectiveParser.h
28 class Tokenizer; variable
33 DirectiveParser(Tokenizer *tokenizer,
87 Tokenizer *mTokenizer;
/external/parameter-framework/upstream/utility/
Tokenizer.cpp
35 const string Tokenizer::defaultDelimiters = " \n\r\t\v\f";
37 Tokenizer::Tokenizer(const string &input, const string &delimiters, bool mergeDelimiters) in Tokenizer() function in Tokenizer
42 vector<string> Tokenizer::split() in split()
Tokenizer.h
42 class Tokenizer : private utility::NonCopyable
54 Tokenizer(const std::string &input, const std::string &delimiters = defaultDelimiters,
56 ~Tokenizer(){}; in ~Tokenizer()
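Together with the tests listed earlier, the parameter-framework entries describe a small splitting utility: construct with an input string, an optional delimiter set (defaulting to " \n\r\t\v\f"), and a merge-delimiters flag, then call split() to obtain the tokens. A hedged usage sketch, assuming split() returns the token vector as the Tokenizer.cpp entry indicates:

#include <iostream>
#include <string>
#include <vector>
#include "Tokenizer.h"  // parameter-framework utility header listed above

int main()
{
    // Default delimiters, merged: presumably yields "a", "bcd", "ef".
    Tokenizer whitespace("a bcd ef");
    for (const std::string &token : whitespace.split()) {
        std::cout << token << "\n";
    }

    // Custom delimiter, as in the "/a/bcd/ef g/h/" test above.
    Tokenizer slashes("/a/bcd/ef g/h/", "/");
    std::vector<std::string> parts = slashes.split();
    std::cout << parts.size() << " slash-delimited tokens\n";

    // With mergeDelimiters == false, empty fields between delimiters are
    // presumably preserved rather than collapsed.
    Tokenizer keepEmpty(",", ",", false);
    std::cout << keepEmpty.split().size() << " tokens from \",\"\n";
    return 0;
}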
/external/libtextclassifier/actions/
ngram-model.h
32 const NGramLinearRegressionModel* model, const Tokenizer* tokenizer,
46 const Tokenizer* tokenizer, const UniLib* unilib);
58 const Tokenizer* tokenizer_;
59 std::unique_ptr<Tokenizer> owned_tokenizer_;
feature-processor.h
33 std::unique_ptr<Tokenizer> CreateTokenizer(
61 const Tokenizer* tokenizer() const { return tokenizer_.get(); } in tokenizer()
65 const std::unique_ptr<Tokenizer> tokenizer_;
/external/protobuf/src/google/protobuf/compiler/
parser.cc
141 inline bool Parser::LookingAtType(io::Tokenizer::TokenType token_type) { in LookingAtType()
146 return LookingAtType(io::Tokenizer::TYPE_END); in AtEnd()
177 if (LookingAtType(io::Tokenizer::TYPE_IDENTIFIER)) { in ConsumeIdentifier()
188 if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) { in ConsumeInteger()
190 if (!io::Tokenizer::ParseInteger(input_->current().text, in ConsumeInteger()
220 if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) { in ConsumeInteger64()
221 if (!io::Tokenizer::ParseInteger(input_->current().text, max_value, in ConsumeInteger64()
236 if (LookingAtType(io::Tokenizer::TYPE_FLOAT)) { in ConsumeNumber()
237 *output = io::Tokenizer::ParseFloat(input_->current().text); in ConsumeNumber()
240 } else if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) { in ConsumeNumber()
[all …]
parser.h
72 bool Parse(io::Tokenizer* input, FileDescriptorProto* file);
149 inline bool LookingAtType(io::Tokenizer::TokenType token_type);
236 void StartAt(const io::Tokenizer::Token& token);
244 void EndAt(const io::Tokenizer::Token& token);
505 io::Tokenizer* input_;
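parser.h's Parse(io::Tokenizer*, FileDescriptorProto*) is where the protoc front end consumes the token stream, so the usual pairing is to build an io::Tokenizer over the raw .proto text and hand it to a compiler::Parser. A sketch under those assumptions (the .proto snippet and the StderrCollector class are illustrative, not taken from these files):

#include <iostream>
#include <google/protobuf/compiler/parser.h>
#include <google/protobuf/descriptor.pb.h>
#include <google/protobuf/io/tokenizer.h>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>

using namespace google::protobuf;

// Same kind of collector as in the earlier sketch; the name is illustrative.
class StderrCollector : public io::ErrorCollector {
 public:
  void AddError(int line, int column, const std::string& message) override {
    std::cerr << line << ":" << column << ": " << message << "\n";
  }
};

int main() {
  const std::string proto_text =
      "syntax = \"proto2\";\n"
      "message Ping { optional int32 id = 1; }\n";

  io::ArrayInputStream raw(proto_text.data(),
                           static_cast<int>(proto_text.size()));
  StderrCollector errors;
  io::Tokenizer tokenizer(&raw, &errors);

  FileDescriptorProto file;
  compiler::Parser parser;
  parser.RecordErrorsTo(&errors);  // route parse errors to the same collector
  // Parse() drains the tokenizer via LookingAtType()/Consume* as shown above.
  bool ok = parser.Parse(&tokenizer, &file);
  std::cout << (ok ? file.DebugString() : "parse failed") << "\n";
  return 0;
}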
/external/cldr/tools/java/org/unicode/cldr/util/
BNF.java
24 private Tokenizer t;
64 t = new Tokenizer(); in BNF()
149 if (type == Tokenizer.DONE) return false; in addRule()
150 if (type != Tokenizer.STRING) error("missing weight"); in addRule()
201 if (t.next() != Tokenizer.NUMBER) error("missing number"); in qualify()
208 if (type == Tokenizer.NUMBER) { in qualify()
223 if (token == Tokenizer.STRING) { in getCore()
228 if (token == Tokenizer.UNICODESET) { in getCore()
298 if (token != Tokenizer.NUMBER) { in getWeight()
/external/tensorflow/tensorflow/python/keras/preprocessing/
text.py
29 Tokenizer = text.Tokenizer variable
35 keras_export('keras.preprocessing.text.Tokenizer')(Tokenizer)
text_test.py
50 tokenizer = keras.preprocessing.text.Tokenizer(num_words=10)
85 tokenizer = keras.preprocessing.text.Tokenizer()
91 tokenizer = keras.preprocessing.text.Tokenizer(oov_token='<unk>')
103 tokenizer = keras.preprocessing.text.Tokenizer()
136 tokenizer = keras.preprocessing.text.Tokenizer(num_words=5)
/external/protobuf/src/google/protobuf/
text_format.cc
255 tokenizer_.set_comment_style(io::Tokenizer::SH_COMMENT_STYLE); in ParserImpl()
274 if (LookingAtType(io::Tokenizer::TYPE_END)) { in Parse()
289 return suc && LookingAtType(io::Tokenizer::TYPE_END); in ParseField()
684 if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) { in ConsumeFieldValue()
709 if (LookingAtType(io::Tokenizer::TYPE_IDENTIFIER)) { in ConsumeFieldValue()
715 LookingAtType(io::Tokenizer::TYPE_INTEGER)) { in ConsumeFieldValue()
753 if (LookingAtType(io::Tokenizer::TYPE_STRING)) { in SkipFieldValue()
754 while (LookingAtType(io::Tokenizer::TYPE_STRING)) { in SkipFieldValue()
781 if (!LookingAtType(io::Tokenizer::TYPE_INTEGER) && in SkipFieldValue()
782 !LookingAtType(io::Tokenizer::TYPE_FLOAT) && in SkipFieldValue()
[all …]
/external/cldr/tools/cldr-unittest/src/org/unicode/cldr/unittest/
TestBNF.java
14 import org.unicode.cldr.util.Tokenizer;
231 Tokenizer t = new Tokenizer(); in testTokenizer()
240 while (type != Tokenizer.DONE) { in testTokenizer()
/external/doclava/src/com/google/doclava/apicheck/
ApiFile.java
70 final Tokenizer tokenizer = new Tokenizer(filename, (new String(buf, 0, size)).toCharArray()); in parseApi()
91 private static void parsePackage(ApiInfo api, Tokenizer tokenizer) in parsePackage()
116 private static void parseClass(ApiInfo api, PackageInfo pkg, Tokenizer tokenizer, String token) in parseClass()
232 private static void parseConstructor(Tokenizer tokenizer, ClassInfo cl, String token) in parseConstructor()
287 private static void parseMethod(Tokenizer tokenizer, ClassInfo cl, String token) in parseMethod()
378 private static void parseField(Tokenizer tokenizer, ClassInfo cl, String token, boolean isEnum) in parseField()
507 private static void parseTypeParameterList(Tokenizer tokenizer, in parseTypeParameterList()
546 private static void parseParameterList(Tokenizer tokenizer, AbstractMethodInfo method, in parseParameterList()
583 private static String parseThrows(Tokenizer tokenizer, AbstractMethodInfo method) in parseThrows()
615 public static void assertIdent(Tokenizer tokenizer, String token) throws ApiParseException { in assertIdent()
[all …]
