Searched refs:token_type (Results 1 – 25 of 38) sorted by relevance

/external/autotest/client/common_lib/cros/
dbus_send.py
47 token_type = token_stream.pop(0)
48 if token_type == 'variant':
49 token_type = token_stream.pop(0)
50 if token_type == 'object':
51 token_type = token_stream.pop(0) # Should be 'path'
55 if token_type in INT_TYPES:
57 if token_type == 'string' or token_type == 'path':
59 if token_type == 'boolean':
61 if token_type == 'double':
63 if token_type == 'array':
[all …]
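
The dbus_send.py hits outline a small recursive parser over dbus-send's textual output: each value begins with a type tag that is popped off a token stream and dispatched on, with 'variant' and 'object path' acting as prefixes. Below is a minimal sketch of that dispatch shape, assuming flat string tokens; the _parse_value name and the INT_TYPES contents are illustrative, and the 'array' branch from the hits is elided:

    INT_TYPES = ('int16', 'uint16', 'int32', 'uint32', 'int64', 'uint64')

    def _parse_value(token_stream):
        token_type = token_stream.pop(0)
        if token_type == 'variant':      # a variant wraps another typed value
            token_type = token_stream.pop(0)
        if token_type == 'object':       # 'object path' is a two-word tag
            token_type = token_stream.pop(0)  # should be 'path'
        if token_type in INT_TYPES:
            return int(token_stream.pop(0))
        if token_type in ('string', 'path'):
            return token_stream.pop(0).strip('"')
        if token_type == 'boolean':
            return token_stream.pop(0) == 'true'
        if token_type == 'double':
            return float(token_stream.pop(0))
        raise ValueError('unhandled token type: %s' % token_type)

    # _parse_value(['variant', 'boolean', 'true']) -> True
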
/external/google-breakpad/src/testing/scripts/generator/cpp/
tokenize.py
74 def __init__(self, token_type, name, start, end):
75 self.token_type = token_type
147 token_type = UNKNOWN
151 token_type = NAME
159 token_type = CONSTANT
162 token_type = CONSTANT
173 token_type = SYNTAX
183 token_type = SYNTAX
186 token_type = CONSTANT
196 token_type = CONSTANT
[all …]
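
tokenize.py couples each token's text with a token_type drawn from a small closed set (UNKNOWN, NAME, CONSTANT, SYNTAX). A rough, self-contained sketch of that shape; the classify helper is invented for illustration, while breakpad's actual tokenizer walks the source character by character:

    UNKNOWN, NAME, CONSTANT, SYNTAX = 'UNKNOWN', 'NAME', 'CONSTANT', 'SYNTAX'

    class Token(object):
        def __init__(self, token_type, name, start, end):
            self.token_type = token_type
            self.name = name      # the token's text
            self.start = start    # offset of the first character
            self.end = end        # offset just past the last character

    def classify(text):
        """Pick a token_type for a lexeme; purely illustrative."""
        token_type = UNKNOWN
        if not text:
            return token_type
        if text[0].isalpha() or text[0] == '_':
            token_type = NAME
        elif text[0].isdigit() or text[0] in '"\'':
            token_type = CONSTANT
        elif not text[0].isspace():
            token_type = SYNTAX
        return token_type
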
ast.py
549 if parts[-1].token_type == tokenize.NAME:
579 if (type_name and type_name[-1].token_type == tokenize.NAME and
580 p.token_type == tokenize.NAME):
713 if token.token_type == _INTERNAL_TOKEN:
738 if token.token_type == tokenize.NAME:
749 if next.token_type == tokenize.SYNTAX and next.name == '(':
805 elif token.token_type == tokenize.SYNTAX:
811 if (token.token_type == tokenize.NAME and
815 elif token.token_type == tokenize.PREPROCESSOR:
854 while (last_token.token_type != expected_token_type or
[all …]
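
ast.py leans on one-token lookahead: a NAME is only treated as a function when the token after it is the SYNTAX token '('. The same check in miniature; Token here is a stand-in namedtuple, not breakpad's class:

    from collections import namedtuple

    Token = namedtuple('Token', 'token_type name')
    NAME, SYNTAX = 'NAME', 'SYNTAX'

    def looks_like_call(token, next_token):
        # a NAME immediately followed by '(' reads as a call or declaration
        return (token.token_type == NAME and
                next_token.token_type == SYNTAX and
                next_token.name == '(')

    # looks_like_call(Token(NAME, 'Foo'), Token(SYNTAX, '(')) -> True
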
/external/protobuf/gtest/scripts/
pump.py
141 def __init__(self, start=None, end=None, value=None, token_type=None):
151 self.token_type = token_type
155 self.start, self.value, self.token_type)
161 self.token_type)
172 for (regex, token_type) in token_table:
179 best_match_token_type = token_type
198 (start_column, length, token_type) = m
203 return MakeToken(lines, found_start, found_end, token_type)
227 def MakeToken(lines, start, end, token_type):
230 return Token(start, end, SubString(lines, start, end), token_type)
[all …]
/external/google-breakpad/src/testing/gtest/scripts/
pump.py
140 def __init__(self, start=None, end=None, value=None, token_type=None):
150 self.token_type = token_type
154 self.start, self.value, self.token_type)
160 self.token_type)
171 for (regex, token_type) in token_table:
178 best_match_token_type = token_type
197 (start_column, length, token_type) = m
202 return MakeToken(lines, found_start, found_end, token_type)
237 def MakeToken(lines, start, end, token_type):
240 return Token(start, end, SubString(lines, start, end), token_type)
[all …]
/external/vulkan-validation-layers/tests/gtest-1.7.0/scripts/
pump.py
140 def __init__(self, start=None, end=None, value=None, token_type=None):
150 self.token_type = token_type
154 self.start, self.value, self.token_type)
160 self.token_type)
171 for (regex, token_type) in token_table:
178 best_match_token_type = token_type
197 (start_column, length, token_type) = m
202 return MakeToken(lines, found_start, found_end, token_type)
237 def MakeToken(lines, start, end, token_type):
240 return Token(start, end, SubString(lines, start, end), token_type)
[all …]
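
The three pump.py copies above are the same file vendored into protobuf, breakpad, and the Vulkan validation layers. Its scanner walks a token_table of (regex, token_type) pairs and keeps the earliest, longest match before calling MakeToken. A condensed sketch of that loop, with a made-up table and flat string offsets instead of pump's (line, column) positions:

    import re

    TOKEN_TABLE = [
        (re.compile(r'\$var\b'), '$var'),
        (re.compile(r'\$range\b'), '$range'),
        (re.compile(r'[_A-Za-z]\w*'), 'id'),
        (re.compile(r'\d+'), 'number'),
    ]

    def find_first_token(source, pos=0):
        """Return (start, end, token_type) for the earliest, longest match."""
        best = None
        for regex, token_type in TOKEN_TABLE:
            m = regex.search(source, pos)
            if m is None:
                continue
            key = (m.start(), m.start() - m.end())  # earliest, then longest
            if best is None or key < best[0]:
                best = (key, (m.start(), m.end(), token_type))
        return None if best is None else best[1]
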
/external/chromium-trace/catapult/third_party/html5lib-python/html5lib/
sanitizer.py
183 token_type = token["type"]
184 if token_type in list(tokenTypes.keys()):
185 token_type = tokenTypes[token_type]
187 if token_type in (tokenTypes["StartTag"], tokenTypes["EndTag"],
190 return self.allowed_token(token, token_type)
192 return self.disallowed_token(token, token_type)
193 elif token_type == tokenTypes["Comment"]:
198 def allowed_token(self, token, token_type):
235 def disallowed_token(self, token, token_type):
236 if token_type == tokenTypes["EndTag"]:
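
sanitizer.py normalizes token["type"] through the tokenTypes mapping, routes tag tokens to allowed_token or disallowed_token, and handles comments separately. A simplified, self-contained version; the string type names and ALLOWED_ELEMENTS stand in for html5lib's real tables:

    ALLOWED_ELEMENTS = {'a', 'b', 'em', 'i', 'p'}

    def sanitize_token(token):
        token_type = token['type']
        if token_type in ('StartTag', 'EndTag', 'EmptyTag'):
            if token['name'] in ALLOWED_ELEMENTS:
                return token                       # allowed_token path
            # disallowed_token path: re-emit the markup as escaped text
            fmt = '</%s>' if token_type == 'EndTag' else '<%s>'
            return {'type': 'Characters', 'data': fmt % token['name']}
        if token_type == 'Comment':
            return None                            # drop comments
        return token
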
/external/chromium-trace/catapult/catapult_base/catapult_base/refactor/
offset_token.py
26 def __init__(self, token_type, string, offset):
27 self._type = token_type
70 token_type, string, (srow, scol), _, _ = next_token
72 offset_tokens.append(OffsetToken(token_type, string, (0, 0)))
76 offset_tokens.append(OffsetToken(token_type, string, (0, scol - ecol)))
79 token_type, string, (srow - erow, scol)))
snippet.py
97 def __init__(self, token_type, tokens):
102 self._type = token_type
107 def Create(cls, token_type, string, offset=(0, 0)):
108 return cls(token_type,
109 [offset_token.OffsetToken(token_type, string, offset)])
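
offset_token.py converts the absolute (row, column) positions reported by Python's tokenize module into offsets relative to the previous token, so a run of tokens can be spliced in elsewhere; snippet.py then groups such OffsetTokens under a shared token_type. A sketch of the three offset cases visible in the hits, written against Python 3's tokenize (catapult's original is Python 2):

    import tokenize
    from io import BytesIO

    class OffsetToken(object):
        def __init__(self, token_type, string, offset):
            self._type = token_type
            self._string = string
            self._offset = offset  # (row delta, column delta or column)

    def offset_tokenize(source):
        offset_tokens = []
        erow = ecol = None
        for token_type, string, (srow, scol), end, _ in tokenize.tokenize(
                BytesIO(source.encode('utf-8')).readline):
            if erow is None:
                offset = (0, 0)               # first token: no offset
            elif srow == erow:
                offset = (0, scol - ecol)     # same line: column delta
            else:
                offset = (srow - erow, scol)  # new line: absolute column
            offset_tokens.append(OffsetToken(token_type, string, offset))
            erow, ecol = end
        return offset_tokens
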
/external/chromium-trace/catapult/third_party/closure_linter/closure_linter/
indentation.py
139 token_type = token.type
145 if token_type == Type.END_PAREN:
148 elif token_type == Type.END_PARAMETERS:
151 elif token_type == Type.END_BRACKET:
154 elif token_type == Type.END_BLOCK:
181 elif token_type == Type.KEYWORD and token.string in ('case', 'default'):
184 elif token_type == Type.SEMICOLON:
188 token_type not in (Type.COMMENT, Type.DOC_PREFIX, Type.STRING_TEXT)):
202 if token_type in Type.COMMENT_TYPES:
221 if token_type == Type.START_BRACKET:
[all …]
ecmametadatapass.py
311 token_type = token.type
326 if token_type == TokenType.START_PAREN:
334 elif token_type == TokenType.END_PAREN:
362 elif (token_type == TokenType.KEYWORD and
370 elif token_type == TokenType.START_PARAMETERS:
373 elif token_type == TokenType.END_PARAMETERS:
376 elif token_type == TokenType.START_BRACKET:
383 elif token_type == TokenType.END_BRACKET:
386 elif token_type == TokenType.START_BLOCK:
401 elif token_type == TokenType.END_BLOCK:
[all …]
ecmalintrules.py
314 token_type = token.type
326 if token_type == Type.PARAMETERS:
341 elif (token_type == Type.START_BLOCK and
345 elif token_type == Type.END_BLOCK:
385 elif token_type == Type.SEMICOLON:
422 elif token_type == Type.START_PAREN:
446 elif token_type == Type.START_BRACKET:
448 elif token_type in (Type.END_PAREN, Type.END_BRACKET):
462 elif token_type == Type.WHITESPACE:
494 elif token_type == Type.OPERATOR:
[all …]
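
indentation.py, ecmametadatapass.py, and ecmalintrules.py all open the same way (token_type = token.type) and then branch through a long if/elif chain over START_*/END_* token types. A toy version of that dispatch shape, tracking only nesting depth; the Token namedtuple and the type names are placeholders for closure_linter's real classes:

    from collections import namedtuple

    Token = namedtuple('Token', 'type string')

    START_TYPES = ('start_paren', 'start_bracket', 'start_block')
    END_TYPES = ('end_paren', 'end_bracket', 'end_block')

    def check_token(token, depth):
        """Return the new nesting depth after one token."""
        token_type = token.type
        if token_type in START_TYPES:
            depth += 1
        elif token_type in END_TYPES:
            if depth == 0:
                raise ValueError('unbalanced %r' % token.string)
            depth -= 1
        return depth
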
javascripttokenizer.py
452 def _CreateToken(self, string, token_type, line, line_number, values=None):
464 return javascripttokens.JavaScriptToken(string, token_type, line,
/external/chromium-trace/catapult/third_party/closure_linter/closure_linter/common/
tokens.py
50 def __init__(self, string, token_type, line, line_number, values=None,
66 self.type = token_type
103 def IsType(self, token_type):
112 return self.type == token_type
matcher.py
46 def __init__(self, regex, token_type, result_mode=None, line_start=False):
58 self.type = token_type
tokenizer.py
78 def _CreateToken(self, string, token_type, line, line_number, values=None):
93 return tokens.Token(string, token_type, line, line_number, values,
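
In common/, a Matcher ties a regex to the token_type it produces, the tokenizer tries matchers in order until one fits, and every Token can answer IsType. A pared-down sketch; result_mode, values, and the real matcher tables are omitted:

    import re

    class Token(object):
        def __init__(self, string, token_type, line, line_number):
            self.string = string
            self.type = token_type
            self.line = line
            self.line_number = line_number

        def IsType(self, token_type):
            return self.type == token_type

    class Matcher(object):
        def __init__(self, regex, token_type):
            self.regex = regex
            self.type = token_type

    MATCHERS = [Matcher(re.compile(r'//[^\n]*'), 'comment'),
                Matcher(re.compile(r'\s+'), 'whitespace'),
                Matcher(re.compile(r'\S+'), 'normal')]

    def tokenize_line(line, line_number):
        tokens, pos = [], 0
        while pos < len(line):
            for matcher in MATCHERS:
                m = matcher.regex.match(line, pos)
                if m:
                    tokens.append(Token(m.group(), matcher.type, line, line_number))
                    pos = m.end()
                    break
        return tokens
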
/external/bison/examples/calc++/
calc++-scanner.ll
16 /* By default yylex returns int, we use token_type.
18 not of token_type. */
44 [-+*/] return yy::calcxx_parser::token_type (yytext[0]);
calc++-driver.hh
10 yy::calcxx_parser::token_type \
calc++-parser.hh
106 typedef token::yytokentype token_type;
/external/opencv3/3rdparty/jinja2/
parser.py
431 token_type = self.stream.current.type
432 if token_type in _compare_operators:
434 ops.append(nodes.Operand(token_type, self.parse_add()))
529 token_type = self.stream.current.type
531 if token_type == 'sub':
534 elif token_type == 'add':
662 token_type = self.stream.current.type
663 if token_type == 'dot' or token_type == 'lbracket':
667 elif token_type == 'lparen':
675 token_type = self.stream.current.type
[all …]
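
Jinja2's parser repeatedly reads self.stream.current.type and branches on it; the hit at line 431 starts the comparison-chain loop. A cut-down, hypothetical rendering of that loop, with parse_add and the stream passed in rather than being parser methods:

    _compare_operators = frozenset(['eq', 'ne', 'lt', 'lteq', 'gt', 'gteq'])

    def parse_compare(stream, parse_add):
        """Parse `a < b <= c` into (first_expr, [(op, expr), ...])."""
        expr = parse_add(stream)
        ops = []
        while True:
            token_type = stream.current.type
            if token_type not in _compare_operators:
                break
            next(stream)                  # consume the operator token
            ops.append((token_type, parse_add(stream)))
        return (expr, ops) if ops else expr
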
lexer.py
145 def _describe_token_type(token_type):
146 if token_type in reverse_operators:
147 return reverse_operators[token_type]
161 }.get(token_type, token_type)
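
lexer.py's _describe_token_type maps an internal token type back to a printable spelling: operators go through reverse_operators, a few named types through a literal dict, and everything else falls through unchanged via .get's default. Its skeleton, with abbreviated tables:

    reverse_operators = {'eq': '==', 'ne': '!=', 'gt': '>', 'lt': '<'}

    def _describe_token_type(token_type):
        if token_type in reverse_operators:
            return reverse_operators[token_type]
        return {
            'name': 'name',
            'integer': 'integer literal',
            'string': 'string literal',
        }.get(token_type, token_type)
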
/external/antlr/antlr-3.4/runtime/Ruby/lib/antlr3/tree/
debug.rb
27 def create_from_token( token_type, from_token, text = nil )
33 def create_from_type( token_type, text )
/external/v8/src/parsing/
token.cc
35 const char Token::token_type[] = {
token.h
194 return token_type[tok] == 'K';
318 static const char token_type[NUM_TOKENS];
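
v8 keeps one classification character per token value, so Token::IsKeyword() is a single array index: token_type[tok] == 'K'. The same trick transliterated to Python, with an invented token numbering:

    IF, ELSE, IDENTIFIER, NUMBER, LPAREN = range(5)

    # one char per token value: 'K' = keyword, 'I' = identifier, ...
    token_type = 'KKINP'

    def is_keyword(tok):
        return token_type[tok] == 'K'

    # is_keyword(IF) -> True, is_keyword(NUMBER) -> False
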
/external/antlr/antlr-3.4/runtime/Ruby/lib/antlr3/
tree.rb
858 def create_from_token( token_type, from_token, text = nil )
860 from_token.type = token_type
866 def create_from_type( token_type, text )
867 from_token = create_token( token_type, DEFAULT_CHANNEL, text )
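
The two ANTLR tree-adaptor factories (seen in both debug.rb and tree.rb) differ only in where the prototype token comes from: create_from_token reuses an existing token and overrides its type, while create_from_type builds a fresh token on DEFAULT_CHANNEL. A Python paraphrase; the Token class is a stand-in for the ANTLR runtime's, and the copy is a safety choice where the Ruby hit mutates from_token in place:

    import copy

    DEFAULT_CHANNEL = 0

    class Token(object):
        def __init__(self, token_type, channel=DEFAULT_CHANNEL, text=None):
            self.type, self.channel, self.text = token_type, channel, text

    def create_from_token(token_type, from_token, text=None):
        node_token = copy.copy(from_token)  # the Ruby version mutates in place
        node_token.type = token_type
        if text is not None:
            node_token.text = text
        return node_token

    def create_from_type(token_type, text):
        return Token(token_type, DEFAULT_CHANNEL, text)
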
