
Searched refs:token_type (Results 1 – 25 of 39) sorted by relevance

/external/autotest/client/common_lib/cros/
dbus_send.py
50 token_type = token_stream.pop(0)
51 if token_type == 'variant':
52 token_type = token_stream.pop(0)
53 if token_type == 'object':
54 token_type = token_stream.pop(0) # Should be 'path'
58 if token_type in INT_TYPES:
60 if token_type == 'string' or token_type == 'path':
62 if token_type == 'boolean':
64 if token_type == 'double':
66 if token_type == 'array':
[all …]
/external/googletest/googlemock/scripts/generator/cpp/
tokenize.py
71 def __init__(self, token_type, name, start, end): argument
72 self.token_type = token_type
144 token_type = UNKNOWN
148 token_type = NAME
156 token_type = CONSTANT
159 token_type = CONSTANT
170 token_type = SYNTAX
180 token_type = SYNTAX
183 token_type = CONSTANT
193 token_type = CONSTANT
[all …]
ast.py
549 if parts[-1].token_type == tokenize.NAME:
579 if (type_name and type_name[-1].token_type == tokenize.NAME and
580 p.token_type == tokenize.NAME):
724 if token.token_type == _INTERNAL_TOKEN:
749 if token.token_type == tokenize.NAME:
768 if next.token_type == tokenize.SYNTAX and next.name == '(':
825 elif token.token_type == tokenize.SYNTAX:
831 if (token.token_type == tokenize.NAME and
835 elif token.token_type == tokenize.PREPROCESSOR:
874 while (last_token.token_type != expected_token_type or
[all …]
/external/python/cpython3/Lib/email/
_header_value_parser.py
117 token_type = None variable in TokenList
168 self.token_type)
190 return [x.content for x in self if x.token_type=='comment']
194 token_type = 'unstructured' variable in UnstructuredTokenList
198 token_type = 'phrase' variable in Phrase
201 token_type = 'word' variable in Word
205 token_type = 'cfws' variable in CFWSList
209 token_type = 'atom' variable in Atom
213 token_type = 'token' variable in Token
218 token_type = 'encoded-word' variable in EncodedWord
[all …]
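The _header_value_parser.py matches above all follow one pattern: TokenList subclasses that each override a class-level token_type string. A minimal, simplified sketch of that pattern (illustrative only, not the actual email._header_value_parser source):

    class TokenList(list):
        token_type = None  # overridden by every concrete subclass

        @property
        def comments(self):
            # mirrors the match at line 190: collect the content of comment children
            return [x.content for x in self if x.token_type == 'comment']

    class UnstructuredTokenList(TokenList):
        token_type = 'unstructured'

    class Atom(TokenList):
        token_type = 'atom'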
/external/python/cpython3/Lib/test/test_email/
test__header_value_parser.py
17 self.assertEqual(x.token_type, 'fws')
70 self.assertEqual(fws.token_type, 'fws')
419 self.assertEqual(ptext.token_type, 'ptext')
471 self.assertEqual(ptext.token_type, 'ptext')
511 self.assertEqual(atext.token_type, 'atext')
541 self.assertEqual(bqs.token_type, 'bare-quoted-string')
609 self.assertEqual(comment.token_type, 'comment')
688 self.assertEqual(cfws.token_type, 'cfws')
736 self.assertEqual(qs.token_type, 'quoted-string')
819 self.assertEqual(atom.token_type, 'atom')
[all …]
/external/chromium-trace/catapult/common/py_utils/py_utils/refactor/
offset_token.py
31 def __init__(self, token_type, string, offset): argument
32 self._type = token_type
75 token_type, string, (srow, scol), _, _ = next_token
77 offset_tokens.append(OffsetToken(token_type, string, (0, 0)))
81 offset_tokens.append(OffsetToken(token_type, string, (0, scol - ecol)))
84 token_type, string, (srow - erow, scol)))
snippet.py
99 def __init__(self, token_type, tokens): argument
104 self._type = token_type
109 def Create(cls, token_type, string, offset=(0, 0)): argument
110 return cls(token_type,
111 [offset_token.OffsetToken(token_type, string, offset)])
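Both catapult files above wrap entries from the standard tokenize module in a small value object keyed by token_type. A rough sketch of that OffsetToken idea (attribute names inferred from the matches, not the actual py_utils.refactor code):

    import tokenize

    class OffsetToken(object):
        """Token whose position is stored as a (row, column) offset from the previous token."""

        def __init__(self, token_type, string, offset):
            self._type = token_type   # a tokenize constant such as tokenize.NAME
            self._string = string
            self._offset = offset     # (row delta, column delta)

        @property
        def type_name(self):
            return tokenize.tok_name[self._type]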
/external/jemalloc_new/test/unit/
stats_print.c
25 token_type_t token_type; member
43 token_init(token_t *token, parser_t *parser, token_type_t token_type, in token_init() argument
46 token->token_type = token_type; in token_init()
58 switch (token->token_type) { in token_error()
670 switch (parser->token.token_type) { in parser_parse_value()
689 assert_d_eq(parser->token.token_type, TOKEN_TYPE_STRING, in parser_parse_pair()
694 switch (parser->token.token_type) { in parser_parse_pair()
715 switch (parser->token.token_type) { in parser_parse_values()
734 assert_d_eq(parser->token.token_type, TOKEN_TYPE_LBRACKET, in parser_parse_array()
739 switch (parser->token.token_type) { in parser_parse_array()
[all …]
/external/yapf/yapftests/
unwrapped_line_test.py
84 def _MakeFormatTokenLeaf(token_type, token_value): argument
85 return format_token.FormatToken(pytree.Leaf(token_type, token_value))
90 _MakeFormatTokenLeaf(token_type, token_value)
91 for token_type, token_value in token_type_values
/external/fonttools/Lib/fontTools/feaLib/
lexer.py
54 token_type, token, location = self.next_()
55 if token_type != Lexer.NEWLINE:
56 return (token_type, token, location)
229 token_type, token, location = next(lexer)
233 if token_type is Lexer.NAME and token == "include":
261 return (token_type, token, location)
/external/fonttools/Lib/fontTools/voltLib/
lexer.py
36 token_type, token, location = self.next_()
37 if token_type not in {Lexer.NEWLINE}:
38 return (token_type, token, location)
/external/libchrome/third_party/jinja2/
parser.py
477 token_type = self.stream.current.type
478 if token_type in _compare_operators:
480 ops.append(nodes.Operand(token_type, self.parse_math1()))
537 token_type = self.stream.current.type
539 if token_type == 'sub':
542 elif token_type == 'add':
670 token_type = self.stream.current.type
671 if token_type == 'dot' or token_type == 'lbracket':
675 elif token_type == 'lparen':
683 token_type = self.stream.current.type
[all …]
lexer.py
152 def _describe_token_type(token_type): argument
153 if token_type in reverse_operators:
154 return reverse_operators[token_type]
168 }.get(token_type, token_type)
/external/python/jinja/src/jinja2/
parser.py
488 token_type = self.stream.current.type
489 if token_type in _compare_operators:
491 ops.append(nodes.Operand(token_type, self.parse_math1()))
549 token_type = self.stream.current.type
551 if token_type == "sub":
554 elif token_type == "add":
691 token_type = self.stream.current.type
692 if token_type == "dot" or token_type == "lbracket":
696 elif token_type == "lparen":
704 token_type = self.stream.current.type
[all …]
lexer.py
144 def _describe_token_type(token_type): argument
145 if token_type in reverse_operators:
146 return reverse_operators[token_type]
160 }.get(token_type, token_type)
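Both lexer.py matches (the libchrome copy and upstream Jinja2) show the same _describe_token_type shape: operator token types map back to their literal text, a few special types get a spelled-out description, and anything else falls back to the type name itself. A hedged sketch with an abbreviated operator table (not the real Jinja2 tables):

    # reverse_operators here is a trimmed stand-in for Jinja2's full table.
    reverse_operators = {'add': '+', 'sub': '-', 'dot': '.', 'lparen': '(', 'rparen': ')'}

    def _describe_token_type(token_type):
        if token_type in reverse_operators:
            return reverse_operators[token_type]
        return {
            'comment_begin': 'begin of comment',
            'comment_end': 'end of comment',
            'name': 'name',
        }.get(token_type, token_type)  # fall back to the raw token type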
/external/tensorflow/tensorflow/core/platform/cloud/
oauth_client.cc
282 string token_type; in ParseOAuthResponse() local
283 TF_RETURN_IF_ERROR(ReadJsonString(root, "token_type", &token_type)); in ParseOAuthResponse()
284 if (token_type != "Bearer") { in ParseOAuthResponse()
286 token_type); in ParseOAuthResponse()
google_auth_provider_test.cc
162 "token_type":"Bearer" in TEST_F()
178 "token_type":"Bearer" in TEST_F()
/external/tensorflow/tensorflow/compiler/mlir/xla/transforms/
legalize_tf_communication.cc
457 auto token_type = TokenType::get(builder.getContext()); in GetTypeWithToken() local
460 result_types.push_back(token_type); in GetTypeWithToken()
464 return builder.getTupleType({type, token_type}); in GetTypeWithToken()
514 Type token_type) { in UpdateControlFlowBlockArgWithToken() argument
517 auto new_arg = block.addArgument(token_type); in UpdateControlFlowBlockArgWithToken()
522 token_type.cast<TupleType>().size() - 1) in UpdateControlFlowBlockArgWithToken()
750 auto token_type = TokenType::get(context); in RewriteFunction() local
754 rewrite_block ? func_body.addArgument(token_type) in RewriteFunction()
755 : builder.create<CreateTokenOp>(func.getLoc(), token_type) in RewriteFunction()
/external/antlr/runtime/Ruby/lib/antlr3/tree/
debug.rb
27 def create_from_token( token_type, from_token, text = nil ) argument
33 def create_from_type( token_type, text ) argument
/external/grpc-grpc/src/core/lib/security/credentials/oauth2/
oauth2_credentials.cc
147 grpc_json* token_type = nullptr; in grpc_oauth2_token_fetcher_credentials_parse_server_response() local
165 token_type = ptr; in grpc_oauth2_token_fetcher_credentials_parse_server_response()
175 if (token_type == nullptr || token_type->type != GRPC_JSON_STRING) { in grpc_oauth2_token_fetcher_credentials_parse_server_response()
185 gpr_asprintf(&new_access_token, "%s %s", token_type->value, in grpc_oauth2_token_fetcher_credentials_parse_server_response()
/external/libtextclassifier/native/utils/
tokenizer.cc
294 int codepoint_index, NumberTokenType token_type, in NumberTokenize()
296 if (current_token_type != token_type) { in NumberTokenize()
303 current_token_type = token_type; in NumberTokenize()
/external/libchrome/mojo/public/tools/bindings/pylib/mojom_tests/parse/
lexer_unittest.py
39 def _MakeLexToken(token_type, value, lineno=1, lexpos=0): argument
43 rv.type, rv.value, rv.lineno, rv.lexpos = token_type, value, lineno, lexpos
/external/antlr/runtime/Ruby/lib/antlr3/
tree.rb
858 def create_from_token( token_type, from_token, text = nil ) argument
860 from_token.type = token_type
866 def create_from_type( token_type, text ) argument
867 from_token = create_token( token_type, DEFAULT_CHANNEL, text )
/external/python/cpython3/Lib/
tokenize.py
659 token_type = token.type
661 token_type = token.exact_type
664 (token_range, tok_name[token_type], token.string))
/external/rust/crates/grpcio-sys/grpc/src/core/lib/security/credentials/oauth2/
oauth2_credentials.cc
164 const char* token_type = nullptr; in grpc_oauth2_token_fetcher_credentials_parse_server_response() local
196 token_type = it->second.string_value().c_str(); in grpc_oauth2_token_fetcher_credentials_parse_server_response()
210 absl::StrCat(token_type, " ", access_token))); in grpc_oauth2_token_fetcher_credentials_parse_server_response()
