/external/python/cpython3/Lib/email/ |
D | _header_value_parser.py |
    105  token_type = None    variable in TokenList
    156  self.token_type)
    178  return [x.content for x in self if x.token_type=='comment']
    183  token_type = 'unstructured'    variable in UnstructuredTokenList
    188  token_type = 'phrase'    variable in Phrase
    192  token_type = 'word'    variable in Word
    197  token_type = 'cfws'    variable in CFWSList
    202  token_type = 'atom'    variable in Atom
    207  token_type = 'token'    variable in Token
    213  token_type = 'encoded-word'    variable in EncodedWord
    [all …]
|
/external/autotest/client/common_lib/cros/ |
D | dbus_send.py |
    50  token_type = token_stream.pop(0)
    51  if token_type == 'variant':
    52  token_type = token_stream.pop(0)
    53  if token_type == 'object':
    54  token_type = token_stream.pop(0)  # Should be 'path'
    58  if token_type in INT_TYPES:
    60  if token_type == 'string' or token_type == 'path':
    62  if token_type == 'boolean':
    64  if token_type == 'double':
    66  if token_type == 'array':
    [all …]
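
The hits above trace a small recursive-descent pass over the whitespace-split output of dbus-send: pop a type tag, unwrap 'variant' and 'object path' prefixes, then dispatch on the tag. A minimal sketch of that dispatch, assuming a flat token list and an INT_TYPES set like the one the hits reference (this is a reconstruction, not the autotest code):

    # Hypothetical reconstruction of the dispatch in dbus_send.py; the real
    # parser handles more types, nesting, and dict entries.
    INT_TYPES = {'byte', 'int16', 'uint16', 'int32', 'uint32', 'int64', 'uint64'}

    def parse_value(token_stream):
        """Pop one typed value off a whitespace-split dbus-send reply."""
        token_type = token_stream.pop(0)
        if token_type == 'variant':           # variants wrap another typed value
            token_type = token_stream.pop(0)
        if token_type == 'object':            # object paths print as 'object path ...'
            token_type = token_stream.pop(0)  # should be 'path'
        if token_type in INT_TYPES:
            return int(token_stream.pop(0))
        if token_type in ('string', 'path'):
            return token_stream.pop(0).strip('"')
        if token_type == 'boolean':
            return token_stream.pop(0) == 'true'
        if token_type == 'double':
            return float(token_stream.pop(0))
        raise ValueError('unhandled token type: %s' % token_type)

    print(parse_value(['variant', 'boolean', 'true']))  # -> True
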
|
/external/google-breakpad/src/testing/scripts/generator/cpp/ |
D | tokenize.py |
    74   def __init__(self, token_type, name, start, end):    argument
    75   self.token_type = token_type
    147  token_type = UNKNOWN
    151  token_type = NAME
    159  token_type = CONSTANT
    162  token_type = CONSTANT
    173  token_type = SYNTAX
    183  token_type = SYNTAX
    186  token_type = CONSTANT
    196  token_type = CONSTANT
    [all …]
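
tokenize.py assigns token_type by looking at how each lexeme starts: identifier characters yield NAME, digits and quotes yield CONSTANT, and everything else falls through to SYNTAX. A rough sketch of that classification step, heavily simplified (the real generator also handles comments, preprocessor lines, and multi-character operators):

    UNKNOWN, NAME, CONSTANT, SYNTAX = 'UNKNOWN', 'NAME', 'CONSTANT', 'SYNTAX'

    def classify(lexeme):
        """Map a lexeme to a token_type, in the spirit of the generator."""
        token_type = UNKNOWN
        first = lexeme[0]
        if first.isalpha() or first == '_':
            token_type = NAME                  # identifier or keyword
        elif first.isdigit() or first in '"\'':
            token_type = CONSTANT              # number, string, or char literal
        else:
            token_type = SYNTAX                # punctuation and operators
        return token_type

    assert classify('foo') == NAME
    assert classify('42') == CONSTANT
    assert classify('{') == SYNTAX
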
|
D | ast.py |
    549  if parts[-1].token_type == tokenize.NAME:
    579  if (type_name and type_name[-1].token_type == tokenize.NAME and
    580  p.token_type == tokenize.NAME):
    713  if token.token_type == _INTERNAL_TOKEN:
    738  if token.token_type == tokenize.NAME:
    749  if next.token_type == tokenize.SYNTAX and next.name == '(':
    805  elif token.token_type == tokenize.SYNTAX:
    811  if (token.token_type == tokenize.NAME and
    815  elif token.token_type == tokenize.PREPROCESSOR:
    854  while (last_token.token_type != expected_token_type or
    [all …]
|
/external/googletest/googlemock/scripts/generator/cpp/ |
D | tokenize.py |
    74   def __init__(self, token_type, name, start, end):    argument
    75   self.token_type = token_type
    147  token_type = UNKNOWN
    151  token_type = NAME
    159  token_type = CONSTANT
    162  token_type = CONSTANT
    173  token_type = SYNTAX
    183  token_type = SYNTAX
    186  token_type = CONSTANT
    196  token_type = CONSTANT
    [all …]
|
D | ast.py |
    551  if parts[-1].token_type == tokenize.NAME:
    581  if (type_name and type_name[-1].token_type == tokenize.NAME and
    582  p.token_type == tokenize.NAME):
    714  if token.token_type == _INTERNAL_TOKEN:
    739  if token.token_type == tokenize.NAME:
    750  if next.token_type == tokenize.SYNTAX and next.name == '(':
    806  elif token.token_type == tokenize.SYNTAX:
    812  if (token.token_type == tokenize.NAME and
    816  elif token.token_type == tokenize.PREPROCESSOR:
    855  while (last_token.token_type != expected_token_type or
    [all …]
|
/external/googletest/googletest/scripts/ |
D | pump.py |
    140  def __init__(self, start=None, end=None, value=None, token_type=None):    argument
    150  self.token_type = token_type
    154  self.start, self.value, self.token_type)
    160  self.token_type)
    171  for (regex, token_type) in token_table:
    178  best_match_token_type = token_type
    197  (start_column, length, token_type) = m
    202  return MakeToken(lines, found_start, found_end, token_type)
    237  def MakeToken(lines, start, end, token_type):    argument
    240  return Token(start, end, SubString(lines, start, end), token_type)
    [all …]
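
pump.py scans each line against a table of (regex, token_type) pairs and keeps the best match, which MakeToken then turns into a Token spanning the matched range. A compact sketch of that table-driven scan; the table entries below are invented for illustration, and the real tool's tie-breaking rules may differ:

    import re

    # Invented table entries; pump's real token_table covers its $-directives.
    TOKEN_TABLE = [
        (re.compile(r'\$var\b'), '$var'),
        (re.compile(r'\$range\b'), '$range'),
        (re.compile(r'\[\['), 'code_start'),
    ]

    def find_first_token(line):
        """Return (start_column, length, token_type) for the earliest match."""
        best = None
        for regex, token_type in TOKEN_TABLE:
            m = regex.search(line)
            if m and (best is None or m.start() < best[0]):
                best = (m.start(), m.end() - m.start(), token_type)
        return best

    print(find_first_token('$range i 1..n'))  # -> (0, 6, '$range')
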
|
/external/google-breakpad/src/testing/gtest/scripts/ |
D | pump.py |
    140  def __init__(self, start=None, end=None, value=None, token_type=None):    argument
    150  self.token_type = token_type
    154  self.start, self.value, self.token_type)
    160  self.token_type)
    171  for (regex, token_type) in token_table:
    178  best_match_token_type = token_type
    197  (start_column, length, token_type) = m
    202  return MakeToken(lines, found_start, found_end, token_type)
    237  def MakeToken(lines, start, end, token_type):    argument
    240  return Token(start, end, SubString(lines, start, end), token_type)
    [all …]
|
/external/python/cpython3/Lib/test/test_email/ |
D | test__header_value_parser.py |
    17   self.assertEqual(x.token_type, 'fws')
    70   self.assertEqual(fws.token_type, 'fws')
    372  self.assertEqual(ptext.token_type, 'ptext')
    424  self.assertEqual(ptext.token_type, 'ptext')
    464  self.assertEqual(atext.token_type, 'atext')
    494  self.assertEqual(bqs.token_type, 'bare-quoted-string')
    557  self.assertEqual(comment.token_type, 'comment')
    636  self.assertEqual(cfws.token_type, 'cfws')
    684  self.assertEqual(qs.token_type, 'quoted-string')
    767  self.assertEqual(atom.token_type, 'atom')
    [all …]
|
/external/chromium-trace/catapult/common/py_utils/py_utils/refactor/ |
D | offset_token.py |
    26  def __init__(self, token_type, string, offset):    argument
    27  self._type = token_type
    70  token_type, string, (srow, scol), _, _ = next_token
    72  offset_tokens.append(OffsetToken(token_type, string, (0, 0)))
    76  offset_tokens.append(OffsetToken(token_type, string, (0, scol - ecol)))
    79  token_type, string, (srow - erow, scol)))
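
offset_token.py re-expresses the absolute (row, column) positions produced by Python's tokenize module as offsets from the previous token's end, so edited token streams can be re-serialized without tracking absolute positions. A condensed sketch of the three cases the hits show, using plain tuples instead of OffsetToken objects:

    import io
    import tokenize

    def to_offset_tokens(source):
        """Convert absolute token positions into offsets from the previous
        token's end: same row -> column delta, new row -> row delta."""
        offset_tokens = []
        erow, ecol = 1, 0
        for token_type, string, (srow, scol), end, _ in \
                tokenize.generate_tokens(io.StringIO(source).readline):
            if not offset_tokens:
                offset_tokens.append((token_type, string, (0, 0)))
            elif srow == erow:
                offset_tokens.append((token_type, string, (0, scol - ecol)))
            else:
                offset_tokens.append((token_type, string, (srow - erow, scol)))
            erow, ecol = end
        return offset_tokens

    for tok in to_offset_tokens('x = 1\n'):
        print(tok)
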
|
D | snippet.py |
    97   def __init__(self, token_type, tokens):    argument
    102  self._type = token_type
    107  def Create(cls, token_type, string, offset=(0, 0)):    argument
    108  return cls(token_type,
    109  [offset_token.OffsetToken(token_type, string, offset)])
|
/external/jemalloc_new/test/unit/ |
D | stats_print.c |
    25   token_type_t token_type;    member
    43   token_init(token_t *token, parser_t *parser, token_type_t token_type,    in token_init() argument
    46   token->token_type = token_type;    in token_init()
    58   switch (token->token_type) {    in token_error()
    670  switch (parser->token.token_type) {    in parser_parse_value()
    689  assert_d_eq(parser->token.token_type, TOKEN_TYPE_STRING,    in parser_parse_pair()
    694  switch (parser->token.token_type) {    in parser_parse_pair()
    715  switch (parser->token.token_type) {    in parser_parse_values()
    734  assert_d_eq(parser->token.token_type, TOKEN_TYPE_LBRACKET,    in parser_parse_array()
    739  switch (parser->token.token_type) {    in parser_parse_array()
    [all …]
|
/external/yapf/yapftests/ |
D | unwrapped_line_test.py |
    84  def _MakeFormatTokenLeaf(token_type, token_value):    argument
    85  return format_token.FormatToken(pytree.Leaf(token_type, token_value))
    90  _MakeFormatTokenLeaf(token_type, token_value)
    91  for token_type, token_value in token_type_values
|
/external/fonttools/Lib/fontTools/feaLib/ |
D | lexer.py |
    53   token_type, token, location = self.next_()
    54   if token_type != Lexer.NEWLINE:
    55   return (token_type, token, location)
    203  token_type, token, location = next(lexer)
    207  if token_type is Lexer.NAME and token == "include":
    238  return (token_type, token, location)
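
Both fontTools lexers (feaLib here, voltLib below) wrap a raw next_() method and filter NEWLINE tokens out of public iteration; feaLib's IncludingLexer additionally intercepts 'include' names. A toy sketch of the newline-filtering wrapper, with stand-in token tuples rather than feaLib's actual format:

    class Lexer:
        NEWLINE, NAME = 'NEWLINE', 'NAME'

        def __init__(self, tokens):
            self._tokens = iter(tokens)

        def next_(self):
            # Raw tokenizer: yields every token, newlines included.
            return next(self._tokens)

        def __iter__(self):
            return self

        def __next__(self):
            # Public iteration silently skips layout-only NEWLINE tokens.
            while True:
                token_type, token, location = self.next_()
                if token_type != Lexer.NEWLINE:
                    return (token_type, token, location)

    lexer = Lexer([('NAME', 'feature', 1), ('NEWLINE', '\n', 1), ('NAME', 'liga', 2)])
    print(list(lexer))  # the NEWLINE token is filtered out
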
|
/external/libchrome/third_party/jinja2/ |
D | parser.py |
    477  token_type = self.stream.current.type
    478  if token_type in _compare_operators:
    480  ops.append(nodes.Operand(token_type, self.parse_math1()))
    537  token_type = self.stream.current.type
    539  if token_type == 'sub':
    542  elif token_type == 'add':
    670  token_type = self.stream.current.type
    671  if token_type == 'dot' or token_type == 'lbracket':
    675  elif token_type == 'lparen':
    683  token_type = self.stream.current.type
    [all …]
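
parser.py drives each precedence level off self.stream.current.type: the comparison parser collects operands while the lookahead is a compare operator, the math parser loops on 'add'/'sub', and the postfix parser dispatches on 'dot', 'lbracket', and 'lparen'. A stripped-down sketch of the add/sub loop, with tokens reduced to (type, value) pairs rather than jinja2's token objects:

    class Stream:
        """Tiny stand-in for jinja2's TokenStream: a cursor with lookahead."""
        def __init__(self, tokens):
            self.tokens, self.pos = tokens, 0

        @property
        def current(self):
            return self.tokens[self.pos]

        def next(self):
            tok = self.tokens[self.pos]
            self.pos += 1
            return tok

    def parse_math(stream):
        """Left-associative add/sub keyed off the lookahead's type."""
        left = int(stream.next()[1])
        while True:
            token_type = stream.current[0]
            if token_type == 'add':
                stream.next()
                left += int(stream.next()[1])
            elif token_type == 'sub':
                stream.next()
                left -= int(stream.next()[1])
            else:
                return left

    stream = Stream([('int', '1'), ('add', '+'), ('int', '2'),
                     ('sub', '-'), ('int', '4'), ('eof', '')])
    print(parse_math(stream))  # -> -1
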
|
D | lexer.py |
    152  def _describe_token_type(token_type):    argument
    153  if token_type in reverse_operators:
    154  return reverse_operators[token_type]
    168  }.get(token_type, token_type)
|
/external/fonttools/Lib/fontTools/voltLib/ |
D | lexer.py |
    38  token_type, token, location = self.next_()
    39  if token_type not in {Lexer.NEWLINE}:
    40  return (token_type, token, location)
|
/external/tensorflow/tensorflow/core/platform/cloud/ |
D | oauth_client.cc |
    281  string token_type;    in ParseOAuthResponse() local
    282  TF_RETURN_IF_ERROR(ReadJsonString(root, "token_type", &token_type));    in ParseOAuthResponse()
    283  if (token_type != "Bearer") {    in ParseOAuthResponse()
    285  token_type);    in ParseOAuthResponse()
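
ParseOAuthResponse reads the token_type field out of the JSON token response and rejects anything that is not a Bearer token before accepting the access token. The same check in Python; the field names follow RFC 6749, but the error handling here is an assumption:

    import json

    def parse_oauth_response(body):
        """Accept an OAuth 2.0 token response only if token_type is Bearer."""
        root = json.loads(body)
        token_type = root['token_type']
        if token_type != 'Bearer':
            raise ValueError('Unexpected OAuth token type: ' + token_type)
        return root['access_token'], root.get('expires_in')

    token, ttl = parse_oauth_response(
        '{"access_token":"ya29.test","token_type":"Bearer","expires_in":3600}')
    print(token, ttl)
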
|
D | google_auth_provider_test.cc |
    160  "token_type":"Bearer"    in TEST_F()
    176  "token_type":"Bearer"    in TEST_F()
|
/external/grpc-grpc/src/core/lib/security/credentials/oauth2/ |
D | oauth2_credentials.cc |
    147  grpc_json* token_type = nullptr;    in grpc_oauth2_token_fetcher_credentials_parse_server_response() local
    165  token_type = ptr;    in grpc_oauth2_token_fetcher_credentials_parse_server_response()
    175  if (token_type == nullptr || token_type->type != GRPC_JSON_STRING) {    in grpc_oauth2_token_fetcher_credentials_parse_server_response()
    185  gpr_asprintf(&new_access_token, "%s %s", token_type->value,    in grpc_oauth2_token_fetcher_credentials_parse_server_response()
|
/external/antlr/runtime/Ruby/lib/antlr3/tree/ |
D | debug.rb |
    27  def create_from_token( token_type, from_token, text = nil )    argument
    33  def create_from_type( token_type, text )    argument
|
/external/v8/src/parsing/ |
D | token.h |
    220  return token_type[tok] == 'K';    in IsKeyword()
    222  static bool IsContextualKeyword(Value tok) { return token_type[tok] == 'C'; }    in IsContextualKeyword()
    336  static const char token_type[NUM_TOKENS];    variable
|
D | token.cc |
    36  const char Token::token_type[] = {TOKEN_LIST(KT, KK, KC)};    member in v8::internal::Token
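
v8 classifies tokens with one flag character per token value: token_type[tok] is 'K' for keywords and 'C' for contextual keywords, with the table generated from the TOKEN_LIST macro. The same table-lookup idea in Python; the token names and flags below are invented for illustration:

    # One classification character per token id, analogous to v8's
    # `static const char token_type[NUM_TOKENS]`.
    TOKENS = ('LPAREN', 'IDENTIFIER', 'RETURN', 'ASYNC', 'OF')
    TOKEN_TYPE = 'TTKCC'  # T = ordinary token, K = keyword, C = contextual

    def is_keyword(tok):
        return TOKEN_TYPE[TOKENS.index(tok)] == 'K'

    def is_contextual_keyword(tok):
        return TOKEN_TYPE[TOKENS.index(tok)] == 'C'

    assert is_keyword('RETURN')
    assert is_contextual_keyword('ASYNC')
    assert not is_keyword('LPAREN')
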
|
/external/libchrome/mojo/public/tools/bindings/pylib/mojom_tests/parse/ |
D | lexer_unittest.py |
    39  def _MakeLexToken(token_type, value, lineno=1, lexpos=0):    argument
    43  rv.type, rv.value, rv.lineno, rv.lexpos = token_type, value, lineno, lexpos
|
/external/antlr/runtime/Ruby/lib/antlr3/ |
D | tree.rb |
    858  def create_from_token( token_type, from_token, text = nil )    argument
    860  from_token.type = token_type
    866  def create_from_type( token_type, text )    argument
    867  from_token = create_token( token_type, DEFAULT_CHANNEL, text )
|