| /external/mesa3d/src/freedreno/ir3/ |
| D | ir3_lexer.l |
|     32   #define TOKEN(t) (ir3_yylval.tok = t)    macro
|     100  "@localsize"     return TOKEN(T_A_LOCALSIZE);
|     101  "@const"         return TOKEN(T_A_CONST);
|     102  "@buf"           return TOKEN(T_A_BUF);
|     103  "@invocationid"  return TOKEN(T_A_INVOCATIONID);
|     104  "@wgid"          return TOKEN(T_A_WGID);
|     105  "@numwg"         return TOKEN(T_A_NUMWG);
|     106  "@branchstack"   return TOKEN(T_A_BRANCHSTACK);
|     107  "@in"            return TOKEN(T_A_IN);
|     108  "@out"           return TOKEN(T_A_OUT);
|     [all …]
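A note on the pattern in this entry: each flex rule binds one literal directive spelling to one parser token id, with TOKEN() also stashing the id in ir3_yylval for the parser. A rough C++ sketch of the same fixed keyword-to-token-id lookup outside flex (the enum and function are invented for illustration, not mesa's API):

    #include <optional>
    #include <string>
    #include <unordered_map>

    // Hypothetical token ids standing in for the T_A_* tokens above.
    enum class Tok { LocalSize, Const, Buf, In, Out };

    // Look up an "@" directive the way the flex rules above do:
    // one fixed spelling per token id, or no token at all.
    std::optional<Tok> DirectiveToken(const std::string &word) {
        static const std::unordered_map<std::string, Tok> kTable = {
            {"@localsize", Tok::LocalSize}, {"@const", Tok::Const},
            {"@buf", Tok::Buf}, {"@in", Tok::In}, {"@out", Tok::Out},
        };
        auto it = kTable.find(word);
        if (it == kTable.end()) return std::nullopt;
        return it->second;
    }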
|
| D | ir3_parser.y |
|     328  %token <num>  T_INT
|     329  %token <unum> T_HEX
|     330  %token <flt>  T_FLOAT
|     331  %token <str>  T_IDENTIFIER
|     332  %token <num>  T_REGISTER
|     333  %token <num>  T_CONSTANT
|     336  %token <tok>  T_A_LOCALSIZE
|     337  %token <tok>  T_A_CONST
|     338  %token <tok>  T_A_BUF
|     339  %token <tok>  T_A_INVOCATIONID
|     [all …]
|
| /external/angle/src/compiler/preprocessor/ |
| D | DirectiveParser.cpp |
|     19  #include "compiler/preprocessor/Token.h"
|     45  DirectiveType getDirective(const pp::Token *token)    in getDirective() argument
|     61  if (token->type != pp::Token::IDENTIFIER)    in getDirective()
|     64  if (token->text == kDirectiveDefine)    in getDirective()
|     66  if (token->text == kDirectiveUndef)    in getDirective()
|     68  if (token->text == kDirectiveIf)    in getDirective()
|     70  if (token->text == kDirectiveIfdef)    in getDirective()
|     72  if (token->text == kDirectiveIfndef)    in getDirective()
|     74  if (token->text == kDirectiveElse)    in getDirective()
|     76  if (token->text == kDirectiveElif)    in getDirective()
|     [all …]
|
| /external/icing/icing/tokenization/ |
| D | rfc822-tokenizer_test.cc |
|     49  EqualsToken(Token::Type::RFC822_TOKEN, "<alex>"),    in TEST()
|     50  EqualsToken(Token::Type::RFC822_HOST_ADDRESS, "alex"),    in TEST()
|     51  EqualsToken(Token::Type::RFC822_ADDRESS, "alex"),    in TEST()
|     52  EqualsToken(Token::Type::RFC822_ADDRESS_COMPONENT_HOST, "alex"),    in TEST()
|     53  EqualsToken(Token::Type::RFC822_TOKEN, "<tom>"),    in TEST()
|     54  EqualsToken(Token::Type::RFC822_HOST_ADDRESS, "tom"),    in TEST()
|     55  EqualsToken(Token::Type::RFC822_ADDRESS, "tom"),    in TEST()
|     56  EqualsToken(Token::Type::RFC822_ADDRESS_COMPONENT_HOST, "tom"))));    in TEST()
|     67  EqualsToken(Token::Type::RFC822_TOKEN, "<你alex@google.com>"),    in TEST()
|     68  EqualsToken(Token::Type::RFC822_LOCAL_ADDRESS, "你alex"),    in TEST()
|     [all …]
|
| /external/antlr/runtime/Ruby/test/functional/lexer/ |
| D | basic.rb |
|     20  token = lexer.next_token
|     21  token.name.should == 'ZERO'
|     23  token = lexer.next_token
|     24  token.name.should == '<EOF>'
|     30  token_types = lexer.map { |token| token.name }
|     34  example "mismatched token" do
|     38  token = lexer.next_token
|     62  token = lexer.next_token
|     63  token.name.should == 'ZERO'
|     65  token = lexer.next_token
|     [all …]
|
| /external/pdfium/xfa/fxfa/formcalc/ |
| D | cxfa_fmlexer_unittest.cpp |
|     14  CXFA_FMLexer::Token token = lexer.NextToken();    in TEST() local
|     15  EXPECT_EQ(TOKeof, token.GetType());    in TEST()
|     21  CXFA_FMLexer::Token token = lexer.NextToken();    in TEST() local
|     22  EXPECT_EQ(TOKeof, token.GetType());    in TEST()
|     29  CXFA_FMLexer::Token token = lexer.NextToken();    in TEST() local
|     31  EXPECT_EQ(TOKminus, token.GetType());    in TEST()
|     32  token = lexer.NextToken();    in TEST()
|     33  EXPECT_EQ(L"12", token.GetString());    in TEST()
|     34  token = lexer.NextToken();    in TEST()
|     35  EXPECT_EQ(TOKeof, token.GetType());    in TEST()
|     [all …]
|
| /external/deqp-deps/amber/src/amberscript/ |
| D | parser.cc |
|     207  for (auto token = tokenizer->NextToken();; token = tokenizer->NextToken()) {    in ParseBufferData() local
|     208  if (token->IsEOL())    in ParseBufferData()
|     210  if (token->IsEOS()) {    in ParseBufferData()
|     217  if (token->IsIdentifier() && token->AsString() == "END")    in ParseBufferData()
|     219  if (!token->IsInteger() && !token->IsDouble() && !token->IsHex())    in ParseBufferData()
|     220  return Result("invalid BUFFER data value: " + token->ToOriginalString());    in ParseBufferData()
|     230  token->ConvertToDouble();    in ParseBufferData()
|     232  double val = token->IsHex() ? static_cast<double>(token->AsHex())    in ParseBufferData()
|     233  : token->AsDouble();    in ParseBufferData()
|     237  if (token->IsDouble()) {    in ParseBufferData()
|     [all …]
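The ParseBufferData() loop in this entry shows the control flow the amber parsers share: pull tokens until end-of-stream, skip end-of-line tokens, stop at an END identifier, and type-check everything else. A stripped-down sketch of that loop with stand-in Tokenizer/Token types (amber's real classes differ):

    #include <cstddef>
    #include <string>
    #include <vector>

    // Hypothetical stand-ins for amber's tokenizer interface.
    enum class Kind { EOS, EOL, Ident, Number };
    struct Token {
        Kind kind;
        std::string text;
        double value = 0.0;
    };
    struct Tokenizer {
        std::vector<Token> tokens;
        std::size_t pos = 0;
        Token Next() {
            return pos < tokens.size() ? tokens[pos++] : Token{Kind::EOS, "", 0.0};
        }
    };

    // Same control flow as ParseBufferData above: consume until END or
    // end-of-stream, skip line breaks, reject non-numeric values.
    bool ParseData(Tokenizer &t, std::vector<double> &out) {
        for (Token tok = t.Next();; tok = t.Next()) {
            if (tok.kind == Kind::EOL) continue;
            if (tok.kind == Kind::EOS) return false;               // END never seen
            if (tok.kind == Kind::Ident && tok.text == "END") return true;
            if (tok.kind != Kind::Number) return false;            // invalid data value
            out.push_back(tok.value);
        }
    }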
|
| /external/rust/crates/xml-rs/src/reader/ |
| D | lexer.rs |
|     18  /// `Token` represents a single lexeme of an XML document. These lexemes
|     21  pub(crate) enum Token {    enum
|     60  impl fmt::Display for Token {    implementation
|     64  Token::Character(c) => c.fmt(f),    in fmt()
|     66  Token::OpeningTagStart => "<",    in fmt()
|     67  Token::ProcessingInstructionStart => "<?",    in fmt()
|     68  Token::DoctypeStart => "<!DOCTYPE",    in fmt()
|     69  Token::ClosingTagStart => "</",    in fmt()
|     70  Token::CommentStart => "<!--",    in fmt()
|     71  Token::CDataStart => "<![CDATA[",    in fmt()
|     [all …]
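The Display impl in this entry maps each structural token back to the literal markup it was lexed from. The same variant-to-lexeme table in C++, purely as an illustration of the mapping shown above (not xml-rs code):

    #include <string_view>

    enum class XmlToken {
        OpeningTagStart, ProcessingInstructionStart, DoctypeStart,
        ClosingTagStart, CommentStart, CDataStart,
    };

    // Mirror of the match arms shown above: each token prints as the
    // markup that produced it.
    constexpr std::string_view ToLexeme(XmlToken t) {
        switch (t) {
            case XmlToken::OpeningTagStart:            return "<";
            case XmlToken::ProcessingInstructionStart: return "<?";
            case XmlToken::DoctypeStart:               return "<!DOCTYPE";
            case XmlToken::ClosingTagStart:            return "</";
            case XmlToken::CommentStart:               return "<!--";
            case XmlToken::CDataStart:                 return "<![CDATA[";
        }
        return "";
    }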
|
| /external/angle/src/tests/preprocessor_tests/ |
| D | token_test.cpp |
|     9   #include "compiler/preprocessor/Token.h"
|     16  pp::Token token;    in TEST() local
|     17  EXPECT_EQ(0, token.type);    in TEST()
|     18  EXPECT_EQ(0u, token.flags);    in TEST()
|     19  EXPECT_EQ(0, token.location.line);    in TEST()
|     20  EXPECT_EQ(0, token.location.file);    in TEST()
|     21  EXPECT_EQ("", token.text);    in TEST()
|     26  pp::Token token;    in TEST() local
|     27  token.type = 1;    in TEST()
|     28  token.flags = 1;    in TEST()
|     [all …]
|
| /external/pigweed/pw_console/py/pw_console/ |
| D | pigweed_code_style.py |
|     21  from pygments.token import Token  # type: ignore
|     24  Token.Comment: '#778899',  # Lighter comments
|     25  Token.Comment.Hashbang: '#778899',
|     26  Token.Comment.Multiline: '#778899',
|     27  Token.Comment.Preproc: '#ff79c6',
|     28  Token.Comment.PreprocFile: '',
|     29  Token.Comment.Single: '#778899',
|     30  Token.Comment.Special: '#778899',
|     31  Token.Error: '#f8f8f2',
|     32  Token.Escape: '',
|     [all …]
|
| /external/antlr/runtime/ObjC/Framework/test/runtime/token/ |
| D | CommonTokenTest.m |
|     17  CommonToken *token = [[CommonToken newToken] retain];
|     18  STAssertNotNil(token, @"Token was nil");
|     19  [token release];
|     24  CommonToken *token = [[CommonToken eofToken] retain];
|     25  STAssertNotNil(token, @"Token was nil");
|     26  STAssertEquals(token.type, (NSInteger)TokenTypeEOF, @"Token was not of type TokenTypeEOF");
|     27  [token release];
|     32  CommonToken *token = [[CommonToken newToken:TokenTypeUP] retain];
|     33  token.text = @"<UP>";
|     34  STAssertNotNil(token, @"Token was nil");
|     [all …]
|
| /external/antlr/runtime/C/src/ |
| D | antlr3commontoken.c |
|     2   * Contains the default implementation of the common token used within
|     38  /* Token API
|     40  static pANTLR3_STRING getText (pANTLR3_COMMON_TOKEN token);
|     41  static void setText (pANTLR3_COMMON_TOKEN token, pANTLR3_STRING text);
|     42  static void setText8 (pANTLR3_COMMON_TOKEN token, pANTLR3_UINT8 text);
|     43  static ANTLR3_UINT32 getType (pANTLR3_COMMON_TOKEN token);
|     44  static void setType (pANTLR3_COMMON_TOKEN token, ANTLR3_UINT32 type);
|     45  static ANTLR3_UINT32 getLine (pANTLR3_COMMON_TOKEN token);
|     46  static void setLine (pANTLR3_COMMON_TOKEN token, ANTLR3_UINT32 line);
|     47  static ANTLR3_INT32 getCharPositionInLine (pANTLR3_COMMON_TOKEN token);
|     [all …]
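The declarations in this entry are ANTLR3's C idiom for object orientation: the common token is a struct whose "methods" are function pointers, each taking the token itself as its first argument. A compressed C++ sketch of that layout (field and function names invented for illustration):

    #include <cstdint>

    // Hypothetical miniature of pANTLR3_COMMON_TOKEN's shape: data plus
    // "methods" stored as function pointers that take the object explicitly.
    struct CommonToken {
        std::uint32_t type;
        std::uint32_t line;
        const char  *text;

        std::uint32_t (*getType)(CommonToken *token);
        void          (*setType)(CommonToken *token, std::uint32_t type);
    };

    static std::uint32_t defaultGetType(CommonToken *token) { return token->type; }
    static void defaultSetType(CommonToken *token, std::uint32_t t) { token->type = t; }

    // Factory wires the default "vtable", as the antlr3 factory functions do.
    CommonToken makeToken(std::uint32_t type) {
        return CommonToken{type, 0, "", &defaultGetType, &defaultSetType};
    }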
|
| /external/libxml2/doc/ |
| D | apibuild.py |
|     394  def push(self, token):    argument
|     395  self.tokens.insert(0, token);
|     398  print("Last token: ", self.last)
|     399  print("Token queue: ", self.tokens)
|     402  def token(self):    member in CLexer
|     623  def error(self, msg, token=-1):    argument
|     628  if token != -1:
|     629  print("Got token ", token)
|     633  def debug(self, msg, token=-1):    argument
|     635  if token != -1:
|     [all …]
|
| /external/sdv/vsomeip/third_party/boost/spirit/doc/lex/ |
| D | tokens_values.qbk |
|     9   [section:lexer_token_values About Tokens and Token Values]
|     13  time separated by whitespace. The different token types recognized by a lexical
|     14  analyzer often get assigned unique integer token identifiers (token ids). These
|     15  token ids are normally used by the parser to identify the current token without
|     17  different with respect to this, as it uses the token ids as the main means of
|     18  identification of the different token types defined for a particular lexical
|     20  sense that it returns (references to) instances of a (user defined) token class
|     21  to the user. The only limitation of this token class is that it must carry at
|     22  least the token id of the token it represents. For more information about the
|     23  interface a user defined token type has to expose please look at the
|     [all …]
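Per the text above, the only hard requirement Spirit.Lex places on a user-defined token class is that it carry the id of the token it represents. A minimal sketch of a class meeting that contract, assuming nothing about Spirit's real token types:

    #include <cstddef>
    #include <string>
    #include <utility>

    // Smallest token type satisfying the contract described above: it must
    // expose the id of the token it represents; the matched text is an
    // optional convenience.
    class minimal_token {
    public:
        minimal_token(std::size_t id, std::string value)
            : id_(id), value_(std::move(value)) {}

        std::size_t id() const { return id_; }                // required
        const std::string &value() const { return value_; }   // optional extra

    private:
        std::size_t id_;
        std::string value_;
    };

A lexer handing out such tokens already satisfies the interface the text describes; anything richer (positions, typed values) is layered on top of the mandatory id.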
|
| /external/angle/util/capture/ |
| D | trace_interpreter.h |
|     28  using Token = char[kMaxTokenSize];    variable
|     30  CallCapture ParseCallCapture(const Token &nameToken,
|     32  const Token *paramTokens,
|     36  void PackParameter(ParamBuffer &params, const Token &token, const TraceStringMap &strings);
|     40  const Token &token,
|     44  void PackParameter<int32_t>(ParamBuffer &params, const Token &token, const TraceStringMap &strings);
|     47  void PackParameter<void *>(ParamBuffer &params, const Token &token, const TraceStringMap &strings);
|     51  const Token &token,
|     55  void PackParameter<void **>(ParamBuffer &params, const Token &token, const TraceStringMap &strings);
|     59  const Token &token,
|     [all …]
|
| D | trace_interpreter.cpp |
|     165  void readToken(Token &token, char delim)    in readToken() argument
|     171  memcpy(token, &mStream[startIndex], tokenSize);    in readToken()
|     172  token[mIndex - startIndex] = 0;    in readToken()
|     220  Token nameToken;    in readFunction()
|     224  Token paramTokens[kMaxParameters];    in readFunction()
|     236  Token &token = paramTokens[numParams++];    in readFunction() local
|     238  memcpy(token, &mStream[tokenStart], tokenSize);    in readFunction()
|     239  token[tokenSize] = 0;    in readFunction()
|     333  void PackResourceID(ParamBuffer &params, const Token &token)    in PackResourceID() argument
|     335  ASSERT(token[0] == 'g');    in PackResourceID()
|     [all …]
|
| /external/cronet/base/win/ |
| D | access_token.h |
|     21  // Impersonation level for the token.
|     29  // This class is used to access the information for a Windows access token.
|     32  // This class represents an access token group.
|     57  // This class represents an access token privilege.
|     76  // Creates an AccessToken object from a token handle.
|     77  // |token| the token handle. This handle will be duplicated for TOKEN_QUERY
|     78  // access, therefore the caller must be granted that access to the token
|     79  // object. The AccessToken object owns its own copy of the token handle so
|     81  // |desired_access| specifies additional access for the token handle,
|     83  static absl::optional<AccessToken> FromToken(HANDLE token,
|     [all …]
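The comments in this entry spell out FromToken's contract: the handle is duplicated with TOKEN_QUERY access (so the caller must hold at least that), and the resulting AccessToken owns its own copy. A hedged usage sketch; the declaration above is truncated, so the second argument, the abseil include path, and the TOKEN_DUPLICATE bit are guesses:

    #include <windows.h>

    #include "base/win/access_token.h"
    #include "third_party/abseil-cpp/absl/types/optional.h"

    void InspectCurrentProcessToken() {
        HANDLE raw = nullptr;
        // TOKEN_QUERY per the comment above; TOKEN_DUPLICATE is added
        // defensively in case the duplication path needs it (a guess).
        if (!::OpenProcessToken(::GetCurrentProcess(),
                                TOKEN_QUERY | TOKEN_DUPLICATE, &raw)) {
            return;
        }
        // FromToken duplicates the handle, so the original can be closed.
        absl::optional<base::win::AccessToken> token =
            base::win::AccessToken::FromToken(raw, /*desired_access=*/0);
        ::CloseHandle(raw);
        if (!token) {
            return;
        }
        // ... inspect groups/privileges through the AccessToken API ...
    }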
|
| /external/snakeyaml/src/main/java/org/yaml/snakeyaml/parser/ |
| D | ParserImpl.java |
|     53   import org.yaml.snakeyaml.tokens.Token;
|     187  private CommentEvent produceCommentEvent(CommentToken token) {    in produceCommentEvent() argument
|     188  Mark startMark = token.getStartMark();    in produceCommentEvent()
|     189  Mark endMark = token.getEndMark();    in produceCommentEvent()
|     190  String value = token.getValue();    in produceCommentEvent()
|     191  CommentType type = token.getCommentType();    in produceCommentEvent()
|     209  StreamStartToken token = (StreamStartToken) scanner.getToken();    in produce() local
|     210  Event event = new StreamStartEvent(token.getStartMark(), token.getEndMark());    in produce()
|     221  if (scanner.checkToken(Token.ID.Comment)) {    in produce()
|     225  if (!scanner.checkToken(Token.ID.Directive, Token.ID.DocumentStart, Token.ID.StreamEnd)) {    in produce()
|     [all …]
|
| /external/doclava/src/com/google/doclava/apicheck/ |
| D | ApiFile.java |
|     74   String token = tokenizer.getToken();    in parseApi() local
|     75   if (token == null) {    in parseApi()
|     78   if ("package".equals(token)) {    in parseApi()
|     81   throw new ApiParseException("expected package got " + token, tokenizer.getLine());    in parseApi()
|     93   String token;    in parsePackage() local
|     97   token = tokenizer.requireToken();    in parsePackage()
|     98   assertIdent(tokenizer, token);    in parsePackage()
|     99   name = token;    in parsePackage()
|     101  token = tokenizer.requireToken();    in parsePackage()
|     102  if (!"{".equals(token)) {    in parsePackage()
|     [all …]
|
| /external/deqp-deps/amber/src/vkscript/ |
| D | command_parser.cc |
|     87   for (auto token = tokenizer_->NextToken(); !token->IsEOS();    in Parse() local
|     88   token = tokenizer_->NextToken()) {    in Parse()
|     89   if (token->IsEOL())    in Parse()
|     92   if (!token->IsIdentifier()) {    in Parse()
|     95   token->ToOriginalString()));    in Parse()
|     98   std::string cmd_name = token->AsString();    in Parse()
|     101  token = tokenizer_->NextToken();    in Parse()
|     102  if (!token->IsIdentifier())    in Parse()
|     104  token->ToOriginalString()));    in Parse()
|     106  cmd_name = token->AsString();    in Parse()
|     [all …]
|
| /external/antlr/runtime/Python/tests/ |
| D | t010lexer.py |
|     27  token = lexer.nextToken()
|     28  assert token.type == self.lexerModule.IDENTIFIER
|     29  assert token.start == 0, token.start
|     30  assert token.stop == 5, token.stop
|     31  assert token.text == 'foobar', token.text
|     33  token = lexer.nextToken()
|     34  assert token.type == self.lexerModule.WS
|     35  assert token.start == 6, token.start
|     36  assert token.stop == 6, token.stop
|     37  assert token.text == ' ', token.text
|     [all …]
|
| D | t011lexer.py |
|     27  token = lexer.nextToken()
|     28  assert token.type == self.lexerModule.IDENTIFIER
|     29  assert token.start == 0, token.start
|     30  assert token.stop == 5, token.stop
|     31  assert token.text == 'foobar', token.text
|     33  token = lexer.nextToken()
|     34  assert token.type == self.lexerModule.WS
|     35  assert token.start == 6, token.start
|     36  assert token.stop == 6, token.stop
|     37  assert token.text == ' ', token.text
|     [all …]
|
| /external/rust/crates/serde_test/src/ |
| D | de.rs |
|     2   use crate::token::Token;
|     12  tokens: &'de [Token],
|     15  fn assert_next_token(de: &mut Deserializer, expected: Token) -> Result<(), Error> {    in assert_next_token()
|     17  Some(token) if token == expected => Ok(()),    in assert_next_token()
|     19  "expected Token::{} but deserialization wants Token::{}",    in assert_next_token()
|     23  "end of tokens but deserialization wants Token::{}",    in assert_next_token()
|     29  fn unexpected(token: Token) -> Error {    in unexpected()
|     31  "deserialization did not expect this token: {}",    in unexpected()
|     32  token,    in unexpected()
|     41  pub fn new(tokens: &'de [Token]) -> Self {    in new()
|     [all …]
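assert_next_token in this entry is the heart of serde_test: deserialization runs against a recorded token slice, and every token the Deserializer requests is checked against the next expected one. The same checking step sketched in C++ with hypothetical types (real serde_test tokens are an enum carrying values, not strings):

    #include <deque>
    #include <stdexcept>
    #include <string>

    // Stand-in for the Token type; a string keeps the sketch small.
    using Token = std::string;

    struct TokenStream {
        std::deque<Token> tokens;

        // Mirrors assert_next_token above: the next recorded token must
        // equal the one deserialization asked for, or the test fails.
        void assert_next(const Token &expected) {
            if (tokens.empty()) {
                throw std::runtime_error(
                    "end of tokens but deserialization wants " + expected);
            }
            if (tokens.front() != expected) {
                throw std::runtime_error(
                    "expected " + tokens.front() +
                    " but deserialization wants " + expected);
            }
            tokens.pop_front();
        }
    };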
|
| /external/python/cpython3/Doc/library/ |
| D | token-list.inc |
|     18  Token value for ``"("``.
|     22  Token value for ``")"``.
|     26  Token value for ``"["``.
|     30  Token value for ``"]"``.
|     34  Token value for ``":"``.
|     38  Token value for ``","``.
|     42  Token value for ``";"``.
|     46  Token value for ``"+"``.
|     50  Token value for ``"-"``.
|     54  Token value for ``"*"``.
|     [all …]
|
| /external/libxml2/ |
| D | rngparser.c |
|     54   const xmlChar *token;    member
|     87   tokenPtr token;    member
|     239  * Scan the schema to get the next token
|     247  tokenPtr token;    in xmlCRNGNextToken() local
|     251  token = &(ctxt->tokens[(ctxt->firstToken + ctxt->nbTokens) % MAX_TOKEN]);    in xmlCRNGNextToken()
|     252  token->toktype = CRNG_NONE;    in xmlCRNGNextToken()
|     286  token->toklen = cur - ctxt->cur;    in xmlCRNGNextToken()
|     287  token->token = xmlDictLookup(ctxt->dict, ctxt->cur, token->toklen);    in xmlCRNGNextToken()
|     288  token->toktype = CRNG_LITERAL_SEGMENT;    in xmlCRNGNextToken()
|     289  token->prefix = NULL;    in xmlCRNGNextToken()
|     [all …]
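Line 251 in this entry indexes tokens as (firstToken + nbTokens) % MAX_TOKEN, i.e. the parser buffers its lookahead in a fixed-size ring of tokens. A bare-bones C++ version of such a ring (the names echo the snippet but are labels only, not libxml2's API):

    #include <array>
    #include <cstddef>

    constexpr std::size_t MAX_TOKEN = 4;

    struct Tok { int toktype = 0; };

    // Fixed-capacity lookahead ring, indexed the same way as line 251 above.
    struct TokenRing {
        std::array<Tok, MAX_TOKEN> tokens;
        std::size_t first = 0;  // index of the oldest buffered token
        std::size_t count = 0;  // how many tokens are buffered

        Tok *push() {  // slot for the next scanned token
            Tok *t = &tokens[(first + count) % MAX_TOKEN];
            ++count;
            return t;
        }
        Tok *pop() {   // consume the oldest buffered token
            Tok *t = &tokens[first];
            first = (first + 1) % MAX_TOKEN;
            --count;
            return t;
        }
    };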
|