Searched refs: token_type (Results 1 – 25 of 65), sorted by relevance

/external/chromium/testing/gmock/scripts/generator/cpp/
tokenize.py
74 def __init__(self, token_type, name, start, end): argument
75 self.token_type = token_type
147 token_type = UNKNOWN
151 token_type = NAME
159 token_type = CONSTANT
162 token_type = CONSTANT
173 token_type = SYNTAX
183 token_type = SYNTAX
186 token_type = CONSTANT
196 token_type = CONSTANT
[all …]
ast.py
549 if parts[-1].token_type == tokenize.NAME:
579 if (type_name and type_name[-1].token_type == tokenize.NAME and
580 p.token_type == tokenize.NAME):
713 if token.token_type == _INTERNAL_TOKEN:
738 if token.token_type == tokenize.NAME:
749 if next.token_type == tokenize.SYNTAX and next.name == '(':
805 elif token.token_type == tokenize.SYNTAX:
811 if (token.token_type == tokenize.NAME and
815 elif token.token_type == tokenize.PREPROCESSOR:
854 while (last_token.token_type != expected_token_type or
[all …]
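The gmock generator hits above all hang off a small Token value object whose token_type field is compared against module-level constants (UNKNOWN, NAME, CONSTANT, SYNTAX). A minimal Python sketch of that shape, using only the names visible in the listing; the constant values and the is_name helper are illustrative assumptions:

# Sketch of the token shape implied by the tokenize.py / ast.py hits above.
SYNTAX, CONSTANT, NAME, UNKNOWN = 'syntax', 'constant', 'name', 'unknown'

class Token(object):
    def __init__(self, token_type, name, start, end):
        self.token_type = token_type
        self.name = name
        self.start = start
        self.end = end

def is_name(token):
    # ast.py-style check: dispatch on token.token_type.
    return token.token_type == NAME

print(is_name(Token(NAME, 'Foo', 0, 3)))  # True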
/external/chromium_org/sandbox/win/tools/finder/
main.cc
32 sandbox::TokenLevel token_type = sandbox::USER_LOCKDOWN; in wmain() local
48 token_type = sandbox::USER_LOCKDOWN; in wmain()
50 token_type = sandbox::USER_RESTRICTED; in wmain()
52 token_type = sandbox::USER_LIMITED; in wmain()
54 token_type = sandbox::USER_INTERACTIVE; in wmain()
56 token_type = sandbox::USER_NON_ADMIN; in wmain()
58 token_type = sandbox::USER_RESTRICTED_SAME_ACCESS; in wmain()
60 token_type = sandbox::USER_UNPROTECTED; in wmain()
141 finder_obj.Init(token_type, object_type, access_type, file_output); in wmain()
finder.cc
24 DWORD Finder::Init(sandbox::TokenLevel token_type, in Init() argument
38 err_code = sandbox::CreateRestrictedToken(&token_handle_, token_type, in Init()
/external/chromium/testing/gtest/scripts/
pump.py
140 def __init__(self, start=None, end=None, value=None, token_type=None): argument
150 self.token_type = token_type
154 self.start, self.value, self.token_type)
160 self.token_type)
171 for (regex, token_type) in token_table:
178 best_match_token_type = token_type
197 (start_column, length, token_type) = m
202 return MakeToken(lines, found_start, found_end, token_type)
237 def MakeToken(lines, start, end, token_type): argument
240 return Token(start, end, SubString(lines, start, end), token_type)
[all …]
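The pump.py matches (this copy and the two identical ones under chromium_org and gtest below) combine a Token holding (start, end, value, token_type), a MakeToken factory, and a (regex, token_type) table that the scanner walks. A hedged sketch of that pattern; the table entries and the first_token helper are invented for illustration:

import re

class Token(object):
    # Mirrors the pump.py constructor signature shown above.
    def __init__(self, start=None, end=None, value=None, token_type=None):
        self.start = start
        self.end = end
        self.value = value
        self.token_type = token_type

def MakeToken(line, start, end, token_type):
    # Simplified single-line analogue of MakeToken(lines, start, end, token_type).
    return Token(start, end, line[start:end], token_type)

# Table-driven matching as in `for (regex, token_type) in token_table`.
token_table = [(re.compile(r'\$var\b'), 'code'), (re.compile(r'\w+'), 'word')]

def first_token(line):
    for regex, token_type in token_table:
        m = regex.search(line)
        if m:
            return MakeToken(line, m.start(), m.end(), token_type)
    return None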
/external/chromium_org/testing/gtest/scripts/
pump.py
140 def __init__(self, start=None, end=None, value=None, token_type=None): argument
150 self.token_type = token_type
154 self.start, self.value, self.token_type)
160 self.token_type)
171 for (regex, token_type) in token_table:
178 best_match_token_type = token_type
197 (start_column, length, token_type) = m
202 return MakeToken(lines, found_start, found_end, token_type)
237 def MakeToken(lines, start, end, token_type): argument
240 return Token(start, end, SubString(lines, start, end), token_type)
[all …]
/external/gtest/scripts/
pump.py
140 def __init__(self, start=None, end=None, value=None, token_type=None): argument
150 self.token_type = token_type
154 self.start, self.value, self.token_type)
160 self.token_type)
171 for (regex, token_type) in token_table:
178 best_match_token_type = token_type
197 (start_column, length, token_type) = m
202 return MakeToken(lines, found_start, found_end, token_type)
237 def MakeToken(lines, start, end, token_type): argument
240 return Token(start, end, SubString(lines, start, end), token_type)
[all …]
/external/chromium-trace/trace-viewer/third_party/closure_linter/closure_linter/
indentation.py
139 token_type = token.type
145 if token_type == Type.END_PAREN:
148 elif token_type == Type.END_PARAMETERS:
151 elif token_type == Type.END_BRACKET:
154 elif token_type == Type.END_BLOCK:
181 elif token_type == Type.KEYWORD and token.string in ('case', 'default'):
191 elif token_type == Type.SEMICOLON:
194 not_binary_operator = (token_type != Type.OPERATOR or
212 if token_type in Type.COMMENT_TYPES:
232 if token_type == Type.START_BRACKET:
[all …]
ecmametadatapass.py
264 token_type = token.type
279 if token_type == TokenType.START_PAREN:
287 elif token_type == TokenType.END_PAREN:
315 elif (token_type == TokenType.KEYWORD and
323 elif token_type == TokenType.START_PARAMETERS:
326 elif token_type == TokenType.END_PARAMETERS:
329 elif token_type == TokenType.START_BRACKET:
336 elif token_type == TokenType.END_BRACKET:
339 elif token_type == TokenType.START_BLOCK:
354 elif token_type == TokenType.END_BLOCK:
[all …]
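Both linter passes read token_type = token.type once and then branch through a long elif chain on paren/bracket/block boundary types. A stripped-down sketch of that dispatch, assuming a simple depth counter in place of the real indentation/metadata tracking:

import collections

class Type(object):
    # Subset of the token types referenced in the hits above.
    START_BLOCK, END_BLOCK = 'start_block', 'end_block'
    START_PAREN, END_PAREN = 'start_paren', 'end_paren'

def track(token, depth):
    # Mirrors the `token_type = token.type` assignment and elif chain above.
    token_type = token.type
    if token_type in (Type.START_BLOCK, Type.START_PAREN):
        depth += 1
    elif token_type in (Type.END_BLOCK, Type.END_PAREN):
        depth -= 1
    return depth

Tok = collections.namedtuple('Tok', 'type string')
print(track(Tok(Type.START_BLOCK, '{'), 0))  # 1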
/external/chromium_org/third_party/closure_linter/closure_linter/
indentation.py
139 token_type = token.type
145 if token_type == Type.END_PAREN:
148 elif token_type == Type.END_PARAMETERS:
151 elif token_type == Type.END_BRACKET:
154 elif token_type == Type.END_BLOCK:
181 elif token_type == Type.KEYWORD and token.string in ('case', 'default'):
191 elif token_type == Type.SEMICOLON:
194 not_binary_operator = (token_type != Type.OPERATOR or
212 if token_type in Type.COMMENT_TYPES:
232 if token_type == Type.START_BRACKET:
[all …]
ecmametadatapass.py
264 token_type = token.type
279 if token_type == TokenType.START_PAREN:
287 elif token_type == TokenType.END_PAREN:
315 elif (token_type == TokenType.KEYWORD and
323 elif token_type == TokenType.START_PARAMETERS:
326 elif token_type == TokenType.END_PARAMETERS:
329 elif token_type == TokenType.START_BRACKET:
336 elif token_type == TokenType.END_BRACKET:
339 elif token_type == TokenType.START_BLOCK:
354 elif token_type == TokenType.END_BLOCK:
[all …]
/external/openfst/src/include/fst/extensions/far/
print-strings.h
46 typename StringPrinter<Arc>::TokenType token_type; in FarPrintStrings() local
48 token_type = StringPrinter<Arc>::SYMBOL; in FarPrintStrings()
50 token_type = StringPrinter<Arc>::BYTE; in FarPrintStrings()
52 token_type = StringPrinter<Arc>::UTF8; in FarPrintStrings()
95 token_type, syms ? syms : fst.InputSymbols()); in FarPrintStrings()
127 if (token_type == StringPrinter<Arc>::SYMBOL) in FarPrintStrings()
compile-strings.h
54 TokenType token_type,
59 token_type_(token_type), symbols_(syms), done_(false),
60 compiler_(token_type, syms, unknown_label, allow_negative_labels) {
166 typename StringCompiler<Arc>::TokenType token_type; in FarCompileStrings() local
168 token_type = StringCompiler<Arc>::SYMBOL; in FarCompileStrings()
170 token_type = StringCompiler<Arc>::BYTE; in FarCompileStrings()
172 token_type = StringCompiler<Arc>::UTF8; in FarCompileStrings()
246 entry_type, token_type, allow_negative_labels, in FarCompileStrings()
farscript.h
211 const FarTokenType token_type; member
224 const FarTokenType token_type, const string &begin_key, in FarPrintStringsArgs()
229 ifilenames(ifilenames), entry_type(entry_type), token_type(token_type), in FarPrintStringsArgs()
240 args->ifilenames, args->entry_type, args->token_type, in FarPrintStrings()
250 const FarTokenType token_type,
/external/chromium_org/third_party/closure_linter/closure_linter/common/
tokens.py
50 def __init__(self, string, token_type, line, line_number, values=None): argument
62 self.type = token_type
97 def IsType(self, token_type): argument
106 return self.type == token_type
matcher.py
46 def __init__(self, regex, token_type, result_mode=None, line_start=False): argument
58 self.type = token_type
tokenizer.py
78 def _CreateToken(self, string, token_type, line, line_number, values=None): argument
93 return tokens.Token(string, token_type, line, line_number, values)
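The three common/ files fit together as a Token class (which stores token_type under .type and exposes IsType), a Matcher pairing a regex with a token_type, and a tokenizer-side factory. A minimal sketch grounded in the signatures shown; result_mode handling and the values payload are left as plain attributes:

class Token(object):
    def __init__(self, string, token_type, line, line_number, values=None):
        self.string = string
        self.type = token_type          # stored as .type, as in tokens.py line 62
        self.line = line
        self.line_number = line_number
        self.values = values

    def IsType(self, token_type):
        return self.type == token_type

class Matcher(object):
    def __init__(self, regex, token_type, result_mode=None, line_start=False):
        self.regex = regex
        self.type = token_type
        self.result_mode = result_mode
        self.line_start = line_start

def _CreateToken(string, token_type, line, line_number, values=None):
    # tokenizer.py-style factory returning a common Token.
    return Token(string, token_type, line, line_number, values)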
/external/chromium-trace/trace-viewer/third_party/closure_linter/closure_linter/common/
tokens.py
50 def __init__(self, string, token_type, line, line_number, values=None): argument
62 self.type = token_type
97 def IsType(self, token_type): argument
106 return self.type == token_type
matcher.py
46 def __init__(self, regex, token_type, result_mode=None, line_start=False): argument
58 self.type = token_type
tokenizer.py
78 def _CreateToken(self, string, token_type, line, line_number, values=None): argument
93 return tokens.Token(string, token_type, line, line_number, values)
/external/chromium_org/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/
pep8.py
369 token_type, text, start, end, line = tokens[index]
370 if (token_type == tokenize.OP and
377 prev_type = token_type
445 for token_type, text, start, end, line in tokens:
446 if token_type in (tokenize.NL, tokenize.NEWLINE, tokenize.ERRORTOKEN):
457 elif token_type == tokenize.OP:
471 prev_type = token_type
551 for token_type, text, start, end, line in tokens:
552 if token_type == tokenize.NL:
554 if token_type == tokenize.COMMENT:
[all …]
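pep8.py walks the physical tokens as 5-tuples and compares token_type against the stdlib tokenize constants. The same unpacking works directly against Python's tokenize module, as in this small example:

import io
import tokenize

source = "x = (1 +\n     2)\n"
tokens = list(tokenize.generate_tokens(io.StringIO(source).readline))

for token_type, text, start, end, line in tokens:
    if token_type == tokenize.OP:
        print('operator %r at %s' % (text, start))
    elif token_type in (tokenize.NL, tokenize.NEWLINE):
        pass  # pep8.py resets per-line state on these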
/external/bison/examples/calc++/
calc++-scanner.ll
16 /* By default yylex returns int, we use token_type.
18 not of token_type. */
44 [-+*/] return yy::calcxx_parser::token_type (yytext[0]);
/external/chromium_org/third_party/jinja2/
parser.py
431 token_type = self.stream.current.type
432 if token_type in _compare_operators:
434 ops.append(nodes.Operand(token_type, self.parse_add()))
529 token_type = self.stream.current.type
531 if token_type == 'sub':
534 elif token_type == 'add':
662 token_type = self.stream.current.type
663 if token_type == 'dot' or token_type == 'lbracket':
667 elif token_type == 'lparen':
675 token_type = self.stream.current.type
[all …]
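The jinja2 parser repeatedly reads self.stream.current.type into token_type and branches on string type names ('add', 'sub', 'dot', 'lparen', ...). A hedged, stream-only sketch of that shape; the Stream and Tok classes here are illustrative stand-ins, not jinja2's real TokenStream API:

class Tok(object):
    def __init__(self, type_, value):
        self.type = type_
        self.value = value

class Stream(object):
    # Stand-in exposing .current and next(), the two members used above.
    def __init__(self, toks):
        self._toks = list(toks)
        self._pos = 0

    @property
    def current(self):
        return self._toks[self._pos]

    def next(self):
        self._pos += 1

def parse_sign(stream):
    # Mirrors the `token_type = self.stream.current.type` checks above.
    token_type = stream.current.type
    if token_type in ('add', 'sub'):
        stream.next()
        return token_type
    return None

print(parse_sign(Stream([Tok('sub', '-'), Tok('integer', 1)])))  # 'sub'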
/external/antlr/antlr-3.4/runtime/Ruby/lib/antlr3/tree/
debug.rb
27 def create_from_token( token_type, from_token, text = nil ) argument
33 def create_from_type( token_type, text ) argument
/external/openfst/src/extensions/far/
farscript.cc
105 const FarTokenType token_type, in FarPrintStrings() argument
115 FarPrintStringsArgs args(ifilenames, entry_type, token_type, in FarPrintStrings()
