
Searched full:tokens (Results 1 – 25 of 44) sorted by relevance

/arkcompiler/runtime_core/static_core/assembler/
context.cpp
28 tokens = t; in Make()
34 … token = std::string_view(&*(tokens[number - 1].wholeLine.begin() + tokens[number - 1].boundLeft), in Make()
35 tokens[number - 1].boundRight - tokens[number - 1].boundLeft); in Make()
37 id = this->tokens[number - 1].type; in Make()
100 if (this->tokens.size() > number) { in Next()
101 return this->tokens[number].type; in Next()
104 return this->tokens[number - 1].type; in Next()
128 return this->tokens.size() < number + 1; in NextMask()
136 if (this->tokens.size() > number) { in operator ++()
139 id = this->tokens[number - 1].type; in operator ++()
[all …]
assembly-parser.h
112 … context_.tokens[static_cast<int>(context_.number) + tokenShift - 1].boundLeft + shift,
113 context_.tokens[static_cast<int>(context_.number) + tokenShift - 1].boundRight,
114 context_.tokens[static_cast<int>(context_.number) + tokenShift - 1].wholeLine);
121 … context_.tokens[context_.number - 1].boundLeft + static_cast<size_t>(shift),
122 context_.tokens[context_.number - 1].boundRight,
123 … context_.tokens[context_.number - 1].wholeLine, Error::ErrorClass::WARNING);
129 return SourcePosition {lineStric_, context_.tokens[context_.number - 1].boundLeft}; in GetCurrentPosition()
131 return SourcePosition {lineStric_, context_.tokens[context_.number - 1].boundRight}; in GetCurrentPosition()
204 void ParseAsCatchall(const std::vector<Token> &tokens);
205 …void ParseAsLanguage(const std::vector<Token> &tokens, bool &isLangParsed, bool &isFirstStatement);
[all …]
lexer.h
68 size_t boundLeft; /* right and left bounds of tokens */
80 using Tokens = std::pair<std::vector<Token>, Error>; variable
85 std::vector<Token> tokens; member
103 * Returns a vector of tokens.
105 PANDA_PUBLIC_API Tokens TokenizeString(const std::string &sourceStr);
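
Read together, the lexer.h declarations above imply a small public API: TokenizeString takes a source string and returns Tokens, i.e. a std::pair of the token vector and an Error. Below is a minimal usage sketch; the header name, the panda::pandasm namespace placement of Lexer (seen in the pandasm.cpp results further down), and the exact type of Token::wholeLine are assumptions.

    #include <iostream>
    #include <string>
    #include "lexer.h"  // assumed header name

    int main()
    {
        panda::pandasm::Lexer lexer;
        // Tokens = std::pair<std::vector<Token>, Error> per the alias above.
        panda::pandasm::Tokens result = lexer.TokenizeString(".record R { i32 f }");
        for (const auto &tok : result.first) {
            // boundLeft/boundRight delimit the token inside its source line.
            std::cout << tok.wholeLine.substr(tok.boundLeft, tok.boundRight - tok.boundLeft)
                      << '\n';
        }
        return 0;
    }
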
pandasm.cpp
110 bool Tokenize(panda::pandasm::Lexer &lexer, std::vector<std::vector<panda::pandasm::Token>> &tokens, in Tokenize() argument
116 panda::pandasm::Tokens q = lexer.TokenizeString(s); in Tokenize()
121 e.lineNumber = tokens.size() + 1; in Tokenize()
126 tokens.push_back(q.first); in Tokenize()
132 …rseProgram(panda::pandasm::Parser &parser, std::vector<std::vector<panda::pandasm::Token>> &tokens, in ParseProgram() argument
136 res = parser.Parse(tokens, inputFile.GetValue()); in ParseProgram()
260 std::vector<std::vector<panda::pandasm::Token>> tokens; in main() local
262 if (!Tokenize(lexer, tokens, inputfile)) { in main()
271 if (!panda::pandasm::ParseProgram(parser, tokens, inputFile, res)) { in main()
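
The pandasm.cpp matches above outline the assembler's two-stage driver: Tokenize() lexes the input line by line into a vector of per-line token vectors, then ParseProgram() hands that table to Parser::Parse. A hedged reconstruction of that flow follows; the file reading is simplified, and Parse's return type is not shown in these results, so result checking is elided.

    #include <fstream>
    #include <string>
    #include <vector>
    #include "assembly-parser.h"  // assumed to pull in Lexer/Parser/Token

    bool AssembleFile(const std::string &path)
    {
        panda::pandasm::Lexer lexer;
        std::vector<std::vector<panda::pandasm::Token>> tokens;

        std::ifstream in(path);
        for (std::string line; std::getline(in, line);) {
            // TokenizeString returns pair<vector<Token>, Error>; on error,
            // Tokenize() above reports tokens.size() + 1 as the line number.
            panda::pandasm::Tokens q = lexer.TokenizeString(line);
            tokens.push_back(q.first);
        }

        panda::pandasm::Parser parser;
        auto res = parser.Parse(tokens, path);  // as in ParseProgram() above
        (void)res;  // error handling elided; its type is not shown here
        return true;
    }
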
assembly-parser.cpp
147 << "): " << context_.tokens[context_.number - 1].wholeLine; in ParseFieldType()
217 void Parser::ParseAsArray(const std::vector<Token> &tokens) in ParseAsArray() argument
219 …LOG(DEBUG, ASSEMBLER) << "started parsing of array (line " << lineStric_ << "): " << tokens[0].who… in ParseAsArray()
236 … LOG(DEBUG, ASSEMBLER) << "array body is open, line " << lineStric_ << ": " << tokens[0].wholeLine; in ParseAsArray()
266 …LOG(DEBUG, ASSEMBLER) << "array body is closed, line " << lineStric_ << ": " << tokens[0].wholeLin… in ParseAsArray()
380 << "): " << context_.tokens[context_.number - 1].wholeLine; in ParseArrayElementType()
550 void Parser::ParseAsRecord(const std::vector<Token> &tokens) in ParseAsRecord() argument
552 …LOG(DEBUG, ASSEMBLER) << "started parsing of record (line " << lineStric_ << "): " << tokens[0].wh… in ParseAsRecord()
571 …LOG(DEBUG, ASSEMBLER) << "record body is open, line " << lineStric_ << ": " << tokens[0].wholeLine; in ParseAsRecord()
582 …G(DEBUG, ASSEMBLER) << "record body is closed, line " << lineStric_ << ": " << tokens[0].wholeLine; in ParseAsRecord()
[all …]
lexer.cpp
159 Tokens Lexer::TokenizeString(const std::string &sourceStr) in TokenizeString()
173 LOG(DEBUG, ASSEMBLER) << " tokens identified: "; in TokenizeString()
175 for (const auto &fI : lines_.back().tokens) { in TokenizeString()
183 return std::pair<std::vector<Token>, Error>(lines_.back().tokens, err_); in TokenizeString()
280 * Tokens handling: set a corresponding
281 * elements bound_left and bound_right of the array tokens
323 << "token " << currLine_->tokens.size() + 1 << "): " in LexTokens()
328 …currLine_->tokens.emplace_back(boundLeft, boundRight, LexGetType(boundLeft, boundRight), currLine_… in LexTokens()
335 LOG(DEBUG, ASSEMBLER) << "all tokens identified (line " << lines_.size() << ")"; in LexTokens()
pandasm.h
32 bool Tokenize(panda::pandasm::Lexer &lexer, std::vector<std::vector<panda::pandasm::Token>> &tokens,
35 …rseProgram(panda::pandasm::Parser &parser, std::vector<std::vector<panda::pandasm::Token>> &tokens,
assembly-context.h
33 * Used to move around tokens.
46 std::vector<panda::pandasm::Token> tokens; /* token list */ member
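
The context.cpp and assembly-context.h matches above read as a cursor over the current line's token list: number is a 1-based position, Next() peeks at the following token's type, NextMask() checks for exhaustion, and operator++ advances. A minimal sketch of that pattern, under the assumption that only the tokens/number fields and those bounds checks carry over; Token and TokenType stand in for the real pandasm types.

    #include <cstddef>
    #include <vector>

    template <typename Token, typename TokenType>
    struct TokenCursor {
        std::vector<Token> tokens;
        size_t number = 1;  // 1-based, matching tokens[number - 1] above

        TokenType Peek() const
        {
            // Mirrors Next(): the following token's type if one exists,
            // otherwise the current token's type.
            return tokens.size() > number ? tokens[number].type
                                          : tokens[number - 1].type;
        }

        bool Exhausted() const
        {
            return tokens.size() < number + 1;  // the NextMask() check
        }

        void Advance()
        {
            if (tokens.size() > number) {  // the operator++ guard
                ++number;
            }
        }
    };
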
/arkcompiler/runtime_core/assembler/
context.cpp
28 tokens = t; in Make()
34 …token = std::string_view(&*(tokens[number - 1].whole_line.begin() + tokens[number - 1].bound_left), in Make()
35 tokens[number - 1].bound_right - tokens[number - 1].bound_left); in Make()
37 id = this->tokens[number - 1].type; in Make()
100 if (this->tokens.size() > number) { in Next()
101 return this->tokens[number].type; in Next()
104 return this->tokens[number - 1].type; in Next()
128 return this->tokens.size() < number + 1; in NextMask()
136 if (this->tokens.size() > number) { in operator ++()
139 id = this->tokens[number - 1].type; in operator ++()
[all …]
assembly-parser.h
111 … context_.tokens[static_cast<int>(context_.number) + token_shift - 1].bound_left + shift,
112 … context_.tokens[static_cast<int>(context_.number) + token_shift - 1].bound_right,
113 … context_.tokens[static_cast<int>(context_.number) + token_shift - 1].whole_line);
120 … context_.tokens[context_.number - 1].bound_left + static_cast<size_t>(shift),
121 context_.tokens[context_.number - 1].bound_right,
122 … context_.tokens[context_.number - 1].whole_line, Error::ErrorClass::WARNING);
128 return SourcePosition {line_stric_, context_.tokens[context_.number - 1].bound_left}; in GetCurrentPosition()
130 return SourcePosition {line_stric_, context_.tokens[context_.number - 1].bound_right}; in GetCurrentPosition()
201 void ParseAsCatchall(const std::vector<Token> &tokens);
202 …void ParseAsLanguage(const std::vector<Token> &tokens, bool &is_lang_parsed, bool &is_first_statem…
[all …]
lexer.h
62 size_t bound_left; /* right and left bounds of tokens */
74 using Tokens = std::pair<std::vector<Token>, Error>; variable
79 std::vector<Token> tokens; member
96 * Returns a vector of tokens.
98 Tokens TokenizeString(const std::string &);
pandasm.cpp
107 bool Tokenize(panda::pandasm::Lexer &lexer, std::vector<std::vector<panda::pandasm::Token>> &tokens, in Tokenize() argument
113 panda::pandasm::Tokens q = lexer.TokenizeString(s); in Tokenize()
118 e.line_number = tokens.size() + 1; in Tokenize()
123 tokens.push_back(q.first); in Tokenize()
129 …rseProgram(panda::pandasm::Parser &parser, std::vector<std::vector<panda::pandasm::Token>> &tokens, in ParseProgram() argument
133 res = parser.Parse(tokens, input_file.GetValue()); in ParseProgram()
244 std::vector<std::vector<panda::pandasm::Token>> tokens; in main() local
246 if (!Tokenize(lexer, tokens, inputfile)) { in main()
255 if (!panda::pandasm::ParseProgram(parser, tokens, input_file, res)) { in main()
assembly-parser.cpp
146 << "): " << context_.tokens[context_.number - 1].whole_line; in ParseFieldType()
220 void Parser::ParseAsArray(const std::vector<Token> &tokens) in ParseAsArray() argument
222 …LOG(DEBUG, ASSEMBLER) << "started parsing of array (line " << line_stric_ << "): " << tokens[0].wh… in ParseAsArray()
239 …LOG(DEBUG, ASSEMBLER) << "array body is open, line " << line_stric_ << ": " << tokens[0].whole_lin… in ParseAsArray()
269 …LOG(DEBUG, ASSEMBLER) << "array body is closed, line " << line_stric_ << ": " << tokens[0].whole_l… in ParseAsArray()
375 << "): " << context_.tokens[context_.number - 1].whole_line; in ParseArrayElementType()
535 void Parser::ParseAsRecord(const std::vector<Token> &tokens) in ParseAsRecord() argument
537 …LOG(DEBUG, ASSEMBLER) << "started parsing of record (line " << line_stric_ << "): " << tokens[0].w… in ParseAsRecord()
557 …G(DEBUG, ASSEMBLER) << "record body is open, line " << line_stric_ << ": " << tokens[0].whole_line; in ParseAsRecord()
569 …DEBUG, ASSEMBLER) << "record body is closed, line " << line_stric_ << ": " << tokens[0].whole_line; in ParseAsRecord()
[all …]
lexer.cpp
162 Tokens Lexer::TokenizeString(const std::string &source_str) in TokenizeString()
176 LOG(DEBUG, ASSEMBLER) << " tokens identified: "; in TokenizeString()
178 for (const auto &f_i : lines_.back().tokens) { in TokenizeString()
187 return std::pair<std::vector<Token>, Error>(lines_.back().tokens, err_); in TokenizeString()
262 * Tokens handling: set a corresponding
263 * elements bound_left and bound_right of the array tokens
316 << "token " << curr_line_->tokens.size() + 1 << "): " in LexTokens()
321 … curr_line_->tokens.emplace_back(bound_left, bound_right, LexGetType(bound_left, bound_right), in LexTokens()
329 LOG(DEBUG, ASSEMBLER) << "all tokens identified (line " << lines_.size() << ")"; in LexTokens()
pandasm.h
32 bool Tokenize(panda::pandasm::Lexer &lexer, std::vector<std::vector<panda::pandasm::Token>> &tokens,
35 …rseProgram(panda::pandasm::Parser &parser, std::vector<std::vector<panda::pandasm::Token>> &tokens,
/arkcompiler/runtime_core/compiler/optimizer/templates/
instructions.rb
18 module Tokens module
61 attr_accessor :tokens, :types accessor in Operand
73 Tokens::Types::INT8 => "DataType::INT8",
74 Tokens::Types::INT16 => "DataType::INT16",
75 Tokens::Types::INT32 => "DataType::INT32",
76 Tokens::Types::INT64 => "DataType::INT64",
77 Tokens::Types::UINT8 => "DataType::UINT8",
78 Tokens::Types::UINT16 => "DataType::UINT16",
79 Tokens::Types::UINT32 => "DataType::UINT32",
80 Tokens::Types::UINT64 => "DataType::UINT64",
[all …]
IR-instructions.md.erb
20 null_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::Ot…
21 zero_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::Ot…
22 bounds_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::…
23 negative_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens
/arkcompiler/runtime_core/static_core/compiler/optimizer/templates/
instructions.rb
18 module Tokens module
61 attr_accessor :tokens, :types accessor in Operand
73 Tokens::Types::INT8 => "DataType::INT8",
74 Tokens::Types::INT16 => "DataType::INT16",
75 Tokens::Types::INT32 => "DataType::INT32",
76 Tokens::Types::INT64 => "DataType::INT64",
77 Tokens::Types::UINT8 => "DataType::UINT8",
78 Tokens::Types::UINT16 => "DataType::UINT16",
79 Tokens::Types::UINT32 => "DataType::UINT32",
80 Tokens::Types::UINT64 => "DataType::UINT64",
[all …]
IR-instructions.md.erb
20 null_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::Ot…
21 zero_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::Ot…
22 bounds_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::…
23 negative_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens
/arkcompiler/runtime_core/static_core/assembler/tests/
lexer_test.cpp
30 Tokens tok = l.TokenizeString(s); in TEST()
42 Tokens tok = l.TokenizeString(s); in TEST()
52 Tokens tok = l.TokenizeString(s); in TEST()
66 Tokens tok = l.TokenizeString(s); in TEST()
76 Tokens tok = l.TokenizeString(s); in TEST()
88 Tokens tok = l.TokenizeString(s); in TEST()
99 Tokens tok = l.TokenizeString(s); in TEST()
108 Tokens tok = l.TokenizeString(s); in TEST()
117 Tokens tok = l.TokenizeString(s); in TEST()
137 Tokens tok = l.TokenizeString(s); in TEST()
[all …]
/arkcompiler/runtime_core/assembler/tests/
lexer_test.cpp
29 Tokens tok = l.TokenizeString(s); in TEST()
41 Tokens tok = l.TokenizeString(s); in TEST()
51 Tokens tok = l.TokenizeString(s); in TEST()
65 Tokens tok = l.TokenizeString(s); in TEST()
75 Tokens tok = l.TokenizeString(s); in TEST()
87 Tokens tok = l.TokenizeString(s); in TEST()
98 Tokens tok = l.TokenizeString(s); in TEST()
107 Tokens tok = l.TokenizeString(s); in TEST()
116 Tokens tok = l.TokenizeString(s); in TEST()
136 Tokens tok = l.TokenizeString(s); in TEST()
[all …]
assembler_lexer_test.cpp
40 Tokens tok = l.TokenizeString(s);
58 Tokens tok = l.TokenizeString(s);
74 Tokens tok = l.TokenizeString(s);
94 Tokens tok = l.TokenizeString(s);
110 Tokens tok = l.TokenizeString(s);
128 Tokens tok = l.TokenizeString(s);
145 Tokens tok = l.TokenizeString(s);
160 Tokens tok = l.TokenizeString(s);
175 Tokens tok = l.TokenizeString(s);
201 Tokens tok = l.TokenizeString(s);
[all …]
/arkcompiler/ets_runtime/ecmascript/base/
json_parser.h
44 enum class Tokens : uint8_t { enum
45 // six structural tokens
137 Tokens token = ParseToken(); in ParseJSONText()
139 case Tokens::OBJECT: in ParseJSONText()
164 case Tokens::ARRAY: in ParseJSONText()
173 case Tokens::LITERAL_TRUE: in ParseJSONText()
177 case Tokens::LITERAL_FALSE: in ParseJSONText()
181 case Tokens::LITERAL_NULL: in ParseJSONText()
185 case Tokens::NUMBER: in ParseJSONText()
189 case Tokens::STRING: in ParseJSONText()
[all …]
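
The json_parser.h matches show a common tagged-dispatch shape: a token classifier maps the upcoming input to one of the Tokens enumerators, and ParseJSONText() switches on the tag. Below is a hedged sketch of such a classifier; only the enumerator names come from the snippet, and ClassifyToken is a hypothetical stand-in for the parser's actual ParseToken().

    #include <cstdint>

    enum class Tokens : uint8_t {
        OBJECT, ARRAY, NUMBER, STRING,
        LITERAL_TRUE, LITERAL_FALSE, LITERAL_NULL,
        TOKEN_ILLEGAL  // assumed error tag; not visible in the results above
    };

    Tokens ClassifyToken(char lookahead)
    {
        switch (lookahead) {
            case '{': return Tokens::OBJECT;
            case '[': return Tokens::ARRAY;
            case '"': return Tokens::STRING;
            case 't': return Tokens::LITERAL_TRUE;   // "true"
            case 'f': return Tokens::LITERAL_FALSE;  // "false"
            case 'n': return Tokens::LITERAL_NULL;   // "null"
            default:
                return (lookahead == '-' || (lookahead >= '0' && lookahead <= '9'))
                           ? Tokens::NUMBER
                           : Tokens::TOKEN_ILLEGAL;
        }
    }
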
/arkcompiler/runtime_core/static_core/plugins/ets/doc/spec/
1_intro.rst
182 the structure of the elementary language parts called 'tokens'. All tokens are
183 defined in :ref:`Lexical Elements`. The set of tokens (identifiers, keywords,
203 The tokens defined by the lexical grammar are terminal symbols of the syntactic
207 how sequences of tokens can form syntactically correct programs.
348 that are not tokens in the alphabet of that language, i.e., operator
369 tokens can form syntactically correct programs.
373 operator and punctuator, or literal. Tokens are lexical input elements
378 -- the establishing of tokens in the process of codebase reading by
490 -- one of lexical input elements that separate tokens from one another
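
As a concrete illustration of the token classes the spec excerpt names (identifiers, keywords, operators and punctuators, literals), a source line such as `let x = 10` lexes into four tokens; whitespace separates tokens but produces none itself, per the last match above. The tag names below are illustrative only, not the spec's actual token kinds.

    #include <string>
    #include <utility>
    #include <vector>

    enum class Kind { KEYWORD, IDENTIFIER, PUNCTUATOR, LITERAL };

    // "let x = 10" as a token sequence (hypothetical tags).
    const std::vector<std::pair<Kind, std::string>> kExample = {
        {Kind::KEYWORD, "let"},
        {Kind::IDENTIFIER, "x"},
        {Kind::PUNCTUATOR, "="},
        {Kind::LITERAL, "10"},
    };
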
/arkcompiler/ets_frontend/test/scripts/auto_xts_test/
run.bat
44 for /f "tokens=*" %%i in ('hdc list targets') do (set target=%%i)
51 for /f "tokens=1,2 delims==" %%i in (running_modules.txt) do (
