Searched refs:tokenize (Results 1 – 25 of 176) sorted by relevance

/third_party/python/Tools/peg_generator/pegen/
tokenizer.py
2 import tokenize
10 def shorttok(tok: tokenize.TokenInfo) -> str:
20 _tokens: List[tokenize.TokenInfo]
22 def __init__(self, tokengen: Iterator[tokenize.TokenInfo], *, verbose: bool = False):
30 def getnext(self) -> tokenize.TokenInfo:
35 if tok.type in (tokenize.NL, tokenize.COMMENT):
47 def peek(self) -> tokenize.TokenInfo:
51 if tok.type in (tokenize.NL, tokenize.COMMENT):
58 def diagnose(self) -> tokenize.TokenInfo:
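The pegen snippet above sketches a buffered token stream with one-token look-ahead: getnext() and peek() both skip NL and COMMENT tokens, and a growing token cache holds everything already read. A minimal sketch of the same pattern (assuming a stripped-down version of pegen's Tokenizer, without its verbose mode, mark/reset, or diagnose()):

```python
import io
import tokenize
from typing import Iterator, List

class Tokenizer:
    """Buffered token stream with one-token look-ahead (simplified)."""

    def __init__(self, tokengen: Iterator[tokenize.TokenInfo]) -> None:
        self._tokengen = tokengen
        self._tokens: List[tokenize.TokenInfo] = []
        self._index = 0

    def peek(self) -> tokenize.TokenInfo:
        """Return the next significant token without consuming it."""
        while self._index == len(self._tokens):
            tok = next(self._tokengen)
            if tok.type in (tokenize.NL, tokenize.COMMENT):
                continue  # non-logical newlines and comments are skipped
            self._tokens.append(tok)
        return self._tokens[self._index]

    def getnext(self) -> tokenize.TokenInfo:
        """Return the next significant token and advance past it."""
        tok = self.peek()
        self._index += 1
        return tok

gen = tokenize.generate_tokens(io.StringIO("x = 1  # hi\n").readline)
t = Tokenizer(gen)
assert t.peek() is t.peek()      # peek() does not consume
print(t.getnext().string)        # 'x'
```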
/third_party/googletest/googlemock/scripts/generator/cpp/
ast.py
44 from cpp import tokenize
549 if parts[-1].token_type == tokenize.NAME:
579 if (type_name and type_name[-1].token_type == tokenize.NAME and
580 p.token_type == tokenize.NAME):
581 type_name.append(tokenize.Token(tokenize.SYNTAX, ' ', 0, 0))
749 if token.token_type == tokenize.NAME:
768 if next.token_type == tokenize.SYNTAX and next.name == '(':
773 syntax = tokenize.SYNTAX
783 new_temp = self._GetTokensUpTo(tokenize.SYNTAX, ';')
786 last_token = tokenize.Token(tokenize.SYNTAX, ';', 0, 0)
[all …]
/third_party/python/Doc/library/
tokenize.rst
1 :mod:`tokenize` --- Tokenizer for Python source
4 .. module:: tokenize
10 **Source code:** :source:`Lib/tokenize.py`
14 The :mod:`tokenize` module provides a lexical scanner for Python source code,
23 :term:`named tuple` returned from :func:`tokenize.tokenize`.
30 .. function:: tokenize(readline)
32 The :func:`.tokenize` generator requires one argument, *readline*, which
57 :func:`.tokenize` determines the source encoding of the file by looking for a
64 Like :func:`.tokenize`, the *readline* argument is a callable returning
69 :func:`.tokenize`. It does not yield an :data:`~token.ENCODING` token.
[all …]
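The documented contract is easy to see in a few lines: tokenize.tokenize() takes a readline callable that returns bytes, yields TokenInfo named tuples, and begins with an ENCODING token carrying the encoding it detected, while generate_tokens() is the str-based variant that skips it:

```python
import io
import tokenize

source = b"answer = 42  # the answer\n"

# tokenize() wants a readline callable producing bytes; BytesIO provides one.
for tok in tokenize.tokenize(io.BytesIO(source).readline):
    print(tokenize.tok_name[tok.type], repr(tok.string))

# The first line printed is ENCODING 'utf-8': the encoding is detected
# from a coding cookie or BOM, defaulting to UTF-8. generate_tokens(),
# the str-based variant, never yields an ENCODING token.
```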
/third_party/boost/libs/spirit/doc/lex/
tokenizing.qbk
11 [heading The tokenize function]
13 The `tokenize()` function is a helper function simplifying the usage of a lexer
21 The construct used to tokenize the given input, while discarding all generated
23 API function `tokenize()` minimizing the code required:
32 bool r = tokenize(first, str.end(), word_count_lexer);
35 section __sec_lex_quickstart_2__. The function `tokenize()` will return either
44 bool tokenize(Iterator& first, Iterator last, Lexer const& lex
48 [[Iterator& first] [The beginning of the input sequence to tokenize. The
53 [[Iterator last] [The end of the input sequence to tokenize.]]
60 A second overload of the `tokenize()` function allows specifying of any arbitrary
[all …]
/third_party/python/Tools/i18n/
pygettext.py
167 import tokenize
335 if ttype == tokenize.STRING and is_literal_string(tstring):
338 elif ttype not in (tokenize.COMMENT, tokenize.NL):
342 if ttype == tokenize.NAME and tstring in ('class', 'def'):
345 if ttype == tokenize.NAME and tstring in opts.keywords:
348 if ttype == tokenize.STRING:
402 if ttype == tokenize.OP:
413 if ttype == tokenize.STRING and is_literal_string(tstring):
416 elif ttype not in (tokenize.NEWLINE, tokenize.INDENT,
417 tokenize.COMMENT):
[all …]
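pygettext drives a small state machine over the token stream: a NAME token matching one of the keywords arms it, a following STRING literal is extracted, COMMENT and NL are ignored, and anything else resets the state. A hypothetical, heavily simplified version of that scan (extract_marked_strings and its one-keyword default are illustrative, not pygettext's actual API):

```python
import io
import tokenize

def extract_marked_strings(source, keywords=("_",)):
    """Yield (lineno, literal) for strings that follow a keyword call."""
    pending = False
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if tok.type == tokenize.NAME and tok.string in keywords:
            pending = True          # saw a keyword -- a string may follow
        elif pending and tok.type == tokenize.OP and tok.string == "(":
            pass                    # still inside the call
        elif pending and tok.type == tokenize.STRING:
            yield tok.start[0], tok.string
            pending = False
        elif tok.type not in (tokenize.COMMENT, tokenize.NL):
            pending = False         # anything else resets the machine

print(list(extract_marked_strings('print(_("hello"))\n')))
# [(1, '"hello"')]
```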
/third_party/typescript/tests/baselines/reference/
arrayAssignmentTest5.types
31 tokenize(line:string, state:IState, includeStates:boolean):ILineTokens;
32 >tokenize : (line: string, state: IState, includeStates: boolean) => ILineTokens
46 var lineTokens:ILineTokens= this.tokenize(line, state, true);
48 >this.tokenize(line, state, true) : ILineTokens
49 >this.tokenize : (line: string, state: IState, includeStates: boolean) => ILineTokens
51 >tokenize : (line: string, state: IState, includeStates: boolean) => ILineTokens
79 public tokenize(line:string, state:IState, includeStates:boolean):ILineTokens {
80 >tokenize : (line: string, state: IState, includeStates: boolean) => ILineTokens
arrayAssignmentTest5.symbols
47 tokenize(line:string, state:IState, includeStates:boolean):ILineTokens;
48 >tokenize : Symbol(IMode.tokenize, Decl(arrayAssignmentTest5.ts, 16, 66))
67 var lineTokens:ILineTokens= this.tokenize(line, state, true);
70 >this.tokenize : Symbol(Bug.tokenize, Decl(arrayAssignmentTest5.ts, 26, 9))
72 >tokenize : Symbol(Bug.tokenize, Decl(arrayAssignmentTest5.ts, 26, 9))
97 public tokenize(line:string, state:IState, includeStates:boolean):ILineTokens {
98 >tokenize : Symbol(Bug.tokenize, Decl(arrayAssignmentTest5.ts, 26, 9))
arrayAssignmentTest5.js
19 tokenize(line:string, state:IState, includeStates:boolean):ILineTokens;
23 var lineTokens:ILineTokens= this.tokenize(line, state, true);
29 public tokenize(line:string, state:IState, includeStates:boolean):ILineTokens {
43 var lineTokens = this.tokenize(line, state, true);
49 Bug.prototype.tokenize = function (line, state, includeStates) { method in Bug
arrayAssignmentTest6.symbols
28 tokenize(line:string, state:IState, includeStates:boolean):ILineTokens;
29 >tokenize : Symbol(IMode.tokenize, Decl(arrayAssignmentTest6.ts, 10, 21))
40 public tokenize(line:string, tokens:IToken[], includeStates:boolean):ILineTokens {
41 >tokenize : Symbol(Bug.tokenize, Decl(arrayAssignmentTest6.ts, 13, 39))
arrayAssignmentTest6.types
19 tokenize(line:string, state:IState, includeStates:boolean):ILineTokens;
20 >tokenize : (line: string, state: IState, includeStates: boolean) => ILineTokens
28 public tokenize(line:string, tokens:IToken[], includeStates:boolean):ILineTokens {
29 >tokenize : (line: string, tokens: IToken[], includeStates: boolean) => ILineTokens
arrayAssignmentTest6.js
13 tokenize(line:string, state:IState, includeStates:boolean):ILineTokens;
16 public tokenize(line:string, tokens:IToken[], includeStates:boolean):ILineTokens {
29 Bug.prototype.tokenize = function (line, tokens, includeStates) { method in Bug
/third_party/vk-gl-cts/framework/randomshaders/
rsgExpression.hpp
57 virtual void tokenize (GeneratorState& state, TokenStream& str) const = DE_NULL;
74 …void tokenize (GeneratorState& state, TokenStream& str) const { DE_UNREF(state); str << Tok… in tokenize() function in rsg::VariableAccess
114 void tokenize (GeneratorState& state, TokenStream& str) const;
132 void tokenize (GeneratorState& state, TokenStream& str) const;
151 void tokenize (GeneratorState& state, TokenStream& str) const;
169 void tokenize (GeneratorState& state, TokenStream& str) const;
191 void tokenize (GeneratorState& state, TokenStream& str) const;
216 void tokenize (GeneratorState& state, TokenStream& str) const;
236 void tokenize (GeneratorState& state, TokenStream& str) const;
258 void tokenize (GeneratorState& state, TokenStream& str) const;
rsgStatement.hpp
44 virtual void tokenize (GeneratorState& state, TokenStream& str) const = DE_NULL;
62 void tokenize (GeneratorState& state, TokenStream& str) const;
78 void tokenize (GeneratorState& state, TokenStream& str) const;
98 void tokenize (GeneratorState& state, TokenStream& str) const;
119 void tokenize (GeneratorState& state, TokenStream& str) const;
145 void tokenize (GeneratorState& state, TokenStream& str) const;
rsgStatement.cpp
203 void BlockStatement::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::BlockStatement
208 (*i)->tokenize(state, str); in tokenize()
219 void ExpressionStatement::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::ExpressionStatement
222 m_expression->tokenize(state, str); in tokenize()
332 void DeclarationStatement::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::DeclarationStatement
339 m_expression->tokenize(state, str); in tokenize()
455 void ConditionalStatement::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::ConditionalStatement
461 m_condition->tokenize(state, str); in tokenize()
468 m_trueStatement->tokenize(state, str); in tokenize()
472 m_trueStatement->tokenize(state, str); in tokenize()
[all …]
rsgShader.cpp
93 void Shader::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::Shader
101 m_globalStatements[ndx]->tokenize(state, str); in tokenize()
107 m_functions[ndx]->tokenize(state, str); in tokenize()
112 m_mainFunction.tokenize(state, str); in tokenize()
125 void Function::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::Function
147 m_functionBlock.tokenize(state, str); in tokenize()
/third_party/python/Lib/
tabnanny.py
25 import tokenize
26 if not hasattr(tokenize, 'NL'):
98 f = tokenize.open(file)
107 process_tokens(tokenize.generate_tokens(f.readline))
109 except tokenize.TokenError as msg:
278 INDENT = tokenize.INDENT
279 DEDENT = tokenize.DEDENT
280 NEWLINE = tokenize.NEWLINE
281 JUNK = tokenize.COMMENT, tokenize.NL
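tabnanny's setup is visible in these lines: the file is opened with tokenize.open() (which decodes it using the encoding declared by a coding cookie or BOM), the stream goes through generate_tokens(), and only INDENT, DEDENT, and NEWLINE matter, with COMMENT and NL treated as junk. A sketch under those assumptions (indentation_events is a hypothetical helper, not tabnanny's API):

```python
import tokenize

JUNK = (tokenize.COMMENT, tokenize.NL)   # ignored entirely, as in tabnanny

def indentation_events(path):
    """Yield (token_name, lineno) for the indentation-relevant tokens."""
    with tokenize.open(path) as f:       # decodes via the coding cookie/BOM
        for tok in tokenize.generate_tokens(f.readline):
            if tok.type in JUNK:
                continue
            if tok.type in (tokenize.INDENT, tokenize.DEDENT, tokenize.NEWLINE):
                yield tokenize.tok_name[tok.type], tok.start[0]

for event in indentation_events(__file__):
    print(*event)
```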
/third_party/python/Tools/scripts/
highlight.py
11 import tokenize
35 tok_type = tokenize.COMMENT
37 for tok in tokenize.generate_tokens(readline):
41 if tok_type == tokenize.COMMENT:
43 elif tok_type == tokenize.OP and tok_str[:1] not in '{}[](),.:;@':
45 elif tok_type == tokenize.STRING:
47 if prev_tok_type == tokenize.INDENT or scol==0:
49 elif tok_type == tokenize.NAME:
cleanfuture.py
42 import tokenize
156 STRING = tokenize.STRING
157 NL = tokenize.NL
158 NEWLINE = tokenize.NEWLINE
159 COMMENT = tokenize.COMMENT
160 NAME = tokenize.NAME
161 OP = tokenize.OP
164 get = tokenize.generate_tokens(self.getline).__next__
reindent.py
46 import tokenize
122 encoding, _ = tokenize.detect_encoding(f.readline)
202 tokens = tokenize.generate_tokens(self.getline)
286 INDENT=tokenize.INDENT,
287 DEDENT=tokenize.DEDENT,
288 NEWLINE=tokenize.NEWLINE,
289 COMMENT=tokenize.COMMENT,
290 NL=tokenize.NL):
generate_opcode_h.py
4 import tokenize
48 if hasattr(tokenize, 'open'):
49 fp = tokenize.open(opcode_py) # Python 3.2+
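Two stdlib helpers recur across these scripts: reindent.py calls tokenize.detect_encoding() to learn a file's encoding without tokenizing it, and generate_opcode_h.py guards tokenize.open() with hasattr() because that function only appeared in Python 3.2. A short illustration of both (the utf-8 fallback is an assumption for the sketch, not what the script does on older Pythons):

```python
import tokenize

# detect_encoding() takes a *bytes* readline and reads at most two lines,
# returning the encoding plus the raw lines it consumed.
with open(__file__, "rb") as f:
    encoding, consumed = tokenize.detect_encoding(f.readline)
print(encoding, len(consumed))

# tokenize.open() opens the file already decoded with that encoding;
# it exists only on Python 3.2+, hence the hasattr() guard above.
if hasattr(tokenize, "open"):
    fp = tokenize.open(__file__)
else:
    fp = open(__file__, encoding="utf-8")  # assumed fallback
fp.close()
```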
/third_party/boost/libs/spirit/test/lex/
semantic_actions.cpp
205 BOOST_TEST(lex::tokenize(first, last, sa0)); in main()
217 BOOST_TEST(lex::tokenize(first, last, sa2)); in main()
230 BOOST_TEST(lex::tokenize(first, last, sa3)); in main()
241 BOOST_TEST(!lex::tokenize(first, last, sa3)); in main()
250 BOOST_TEST(lex::tokenize(first, last, sa3)); in main()
262 BOOST_TEST(lex::tokenize(first, last, sa4)); in main()
276 BOOST_TEST(lex::tokenize(first, last, sa4, identifier_token)); in main()
293 BOOST_TEST(lex::tokenize(first, last, sa5)); in main()
/third_party/boost/libs/range/doc/reference/adaptors/
tokenized.qbk
26 boost::adaptors::tokenize(rng, regex)
27 boost::adaptors::tokenize(rng, regex, i)
28 boost::adaptors::tokenize(rng, regex, rndRng)
29 boost::adaptors::tokenize(rng, regex, i, flags)
30 boost::adaptors::tokenize(rng, regex, rndRng, flags)
/third_party/typescript/tests/cases/compiler/
arrayAssignmentTest5.ts
18 tokenize(line:string, state:IState, includeStates:boolean):ILineTokens; method
22 var lineTokens:ILineTokens= this.tokenize(line, state, true);
28 public tokenize(line:string, state:IState, includeStates:boolean):ILineTokens { method in Bug
/third_party/mindspore/tests/st/fl/albert/src/
tokenization.py
103 def tokenize(self, text): member in BertTokenizer
106 for token in self.basic_tokenizer.tokenize(text):
110 for sub_token in self.wordpiece_tokenizer.tokenize(token):
113 split_tokens = self.wordpiece_tokenizer.tokenize(text)
179 def tokenize(self, text): member in BasicTokenizer
295 def tokenize(self, text): member in WordpieceTokenizer
400 def tokenize(self, text): member in CustomizedBasicTokenizer
473 def tokenize(self, text): member in CustomizedTokenizer
475 basic_tokens = self.basic_tokenizer.tokenize(text)
477 wordpiece_tokens = self.wordpiece_tokenizer.tokenize(token)
[all …]
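The BertTokenizer here composes two passes: basic_tokenizer splits raw text into word-level tokens, and wordpiece_tokenizer re-splits each word into subwords. A self-contained sketch of that composition, with deliberately toy stand-ins for both sub-tokenizers (the real ones handle punctuation, Unicode categories, vocabulary files, and more):

```python
class BasicTokenizer:
    """Toy stand-in: lowercase + whitespace split."""
    def tokenize(self, text):
        return text.lower().split()

class WordpieceTokenizer:
    """Toy stand-in: greedy longest-prefix-first subword matching."""
    def __init__(self, vocab):
        self.vocab = vocab

    def tokenize(self, token):
        pieces, start = [], 0
        while start < len(token):
            end = len(token)
            while end > start:
                piece = token[start:end] if start == 0 else "##" + token[start:end]
                if piece in self.vocab:
                    pieces.append(piece)
                    break
                end -= 1
            else:
                return ["[UNK]"]        # no prefix matched at all
            start = end
        return pieces

class BertTokenizer:
    """Same composition as in the snippet: basic pass, then wordpiece."""
    def __init__(self, vocab):
        self.basic_tokenizer = BasicTokenizer()
        self.wordpiece_tokenizer = WordpieceTokenizer(vocab)

    def tokenize(self, text):
        split_tokens = []
        for token in self.basic_tokenizer.tokenize(text):
            for sub_token in self.wordpiece_tokenizer.tokenize(token):
                split_tokens.append(sub_token)
        return split_tokens

vocab = {"token", "##ize", "##rs", "work"}
print(BertTokenizer(vocab).tokenize("Tokenizers work"))
# ['token', '##ize', '##rs', 'work']
```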
/third_party/boost/libs/coroutine/example/asymmetric/
chaining.cpp
28 void tokenize(coro_t::push_type& sink, coro_t::pull_type& source) in tokenize() function
143 coro_t::pull_type tokenizer(boost::bind(tokenize, _1, boost::ref(reader))); in main()
155 coro_t::pull_type tokenizer(boost::bind(tokenize, _1, boost::ref(reader))); in main()
168 coro_t::pull_type tokenizer(boost::bind(tokenize, _1, boost::ref(reader))); in main()
181 coro_t::pull_type tokenizer(boost::bind(tokenize, _1, boost::ref(reader))); in main()
192 coro_t::pull_type tokenizer(boost::bind(tokenize, _1, boost::ref(reader))); in main()
