
Searched full:tokens (Results 1 – 20 of 20) sorted by relevance

/arkcompiler/runtime_core/assembler/
context.cpp
28 tokens = t; in Make()
34 …token = std::string_view(&*(tokens[number - 1].whole_line.begin() + tokens[number - 1].bound_left), in Make()
35 tokens[number - 1].bound_right - tokens[number - 1].bound_left); in Make()
37 id = this->tokens[number - 1].type; in Make()
100 if (this->tokens.size() > number) { in Next()
101 return this->tokens[number].type; in Next()
104 return this->tokens[number - 1].type; in Next()
128 return this->tokens.size() < number + 1; in NextMask()
136 if (this->tokens.size() > number) { in operator ++()
139 id = this->tokens[number - 1].type; in operator ++()
[all …]
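The context.cpp hits above sketch a cursor that walks a per-line token vector with a 1-based index and rebuilds the current token's text from its source line and bounds. A minimal, self-contained sketch of that pattern follows; the struct names and fields mirror the snippets but are hypothetical stand-ins, not the real pandasm Context class.

```cpp
#include <cstddef>
#include <string>
#include <string_view>
#include <vector>

struct Token {                 // hypothetical stand-in for panda::pandasm::Token
    size_t bound_left = 0;     // offsets of the token inside whole_line
    size_t bound_right = 0;
    int type = 0;
    std::string whole_line;
};

struct Cursor {                // hypothetical stand-in for the Context cursor
    std::vector<Token> tokens;
    size_t number = 1;         // 1-based, matching the [number - 1] accesses above

    // Current token text, recovered from its line via the stored bounds.
    std::string_view Current() const
    {
        const Token &t = tokens[number - 1];
        return std::string_view(t.whole_line).substr(t.bound_left, t.bound_right - t.bound_left);
    }

    // Peek at the next token's type, or stay on the current one at the end.
    int Next() const
    {
        return tokens.size() > number ? tokens[number].type : tokens[number - 1].type;
    }
};
```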
assembly-parser.h
111 … context_.tokens[static_cast<int>(context_.number) + token_shift - 1].bound_left + shift,
112 … context_.tokens[static_cast<int>(context_.number) + token_shift - 1].bound_right,
113 … context_.tokens[static_cast<int>(context_.number) + token_shift - 1].whole_line);
120 … context_.tokens[context_.number - 1].bound_left + static_cast<size_t>(shift),
121 context_.tokens[context_.number - 1].bound_right,
122 … context_.tokens[context_.number - 1].whole_line, Error::ErrorClass::WARNING);
128 return SourcePosition {line_stric_, context_.tokens[context_.number - 1].bound_left}; in GetCurrentPosition()
130 return SourcePosition {line_stric_, context_.tokens[context_.number - 1].bound_right}; in GetCurrentPosition()
201 void ParseAsCatchall(const std::vector<Token> &tokens);
202 …void ParseAsLanguage(const std::vector<Token> &tokens, bool &is_lang_parsed, bool &is_first_statem…
[all …]
lexer.h
62 size_t bound_left; /* right and left bounds of tokens */
74 using Tokens = std::pair<std::vector<Token>, Error>; variable
79 std::vector<Token> tokens; member
96 * Returns a vector of tokens.
98 Tokens TokenizeString(const std::string &);
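Based on the lexer.h declarations above, a short usage sketch: `TokenizeString` lexes one source line and returns `Tokens`, i.e. `std::pair<std::vector<Token>, Error>`. The include path and the wrapper function are assumptions.

```cpp
#include <string>

#include "assembler/lexer.h"  // include path is an assumption

void LexOneLine(const std::string &line)
{
    panda::pandasm::Lexer lexer;
    panda::pandasm::Tokens result = lexer.TokenizeString(line);
    const auto &tokens = result.first;   // the tokens found on this line
    const auto &error = result.second;   // the per-line lexing error, if any
    (void)tokens;
    (void)error;
}
```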
assembly-parser.cpp
147 << "): " << context_.tokens[context_.number - 1].whole_line; in ParseFieldType()
221 void Parser::ParseAsArray(const std::vector<Token> &tokens) in ParseAsArray() argument
223 …LOG(DEBUG, ASSEMBLER) << "started parsing of array (line " << line_stric_ << "): " << tokens[0].wh… in ParseAsArray()
240 …LOG(DEBUG, ASSEMBLER) << "array body is open, line " << line_stric_ << ": " << tokens[0].whole_lin… in ParseAsArray()
270 …LOG(DEBUG, ASSEMBLER) << "array body is closed, line " << line_stric_ << ": " << tokens[0].whole_l… in ParseAsArray()
376 << "): " << context_.tokens[context_.number - 1].whole_line; in ParseArrayElementType()
536 void Parser::ParseAsRecord(const std::vector<Token> &tokens) in ParseAsRecord() argument
538 …LOG(DEBUG, ASSEMBLER) << "started parsing of record (line " << line_stric_ << "): " << tokens[0].w… in ParseAsRecord()
558 …G(DEBUG, ASSEMBLER) << "record body is open, line " << line_stric_ << ": " << tokens[0].whole_line; in ParseAsRecord()
570 …DEBUG, ASSEMBLER) << "record body is closed, line " << line_stric_ << ": " << tokens[0].whole_line; in ParseAsRecord()
[all …]
pandasm.cpp
110 bool Tokenize(panda::pandasm::Lexer &lexer, std::vector<std::vector<panda::pandasm::Token>> &tokens, in Tokenize() argument
116 panda::pandasm::Tokens q = lexer.TokenizeString(s); in Tokenize()
121 e.line_number = tokens.size() + 1; in Tokenize()
126 tokens.push_back(q.first); in Tokenize()
132 …rseProgram(panda::pandasm::Parser &parser, std::vector<std::vector<panda::pandasm::Token>> &tokens, in ParseProgram() argument
136 res = parser.Parse(tokens, input_file.GetValue()); in ParseProgram()
262 std::vector<std::vector<panda::pandasm::Token>> tokens; in main() local
264 if (!Tokenize(lexer, tokens, inputfile)) { in main()
273 if (!panda::pandasm::ParseProgram(parser, tokens, input_file, res)) { in main()
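The pandasm.cpp hits suggest the overall driver flow: lex each input line into its own token vector, collect the per-line vectors, then hand the whole collection to `Parser::Parse` together with the file name. A hedged sketch of that flow; the include paths, stream handling, error checking, and result handling are assumptions beyond what the snippets show.

```cpp
#include <fstream>
#include <string>
#include <vector>

#include "assembler/lexer.h"             // include paths are assumptions
#include "assembler/assembly-parser.h"

bool AssembleSketch(const std::string &path)
{
    std::ifstream in(path);
    panda::pandasm::Lexer lexer;
    std::vector<std::vector<panda::pandasm::Token>> tokens;

    for (std::string line; std::getline(in, line);) {
        panda::pandasm::Tokens q = lexer.TokenizeString(line);  // one token vector per line
        tokens.push_back(q.first);                              // q.second carries the Error (unchecked here)
    }

    panda::pandasm::Parser parser;
    auto res = parser.Parse(tokens, path);  // same call shape as in ParseProgram() above
    (void)res;                              // result handling omitted in this sketch
    return true;
}
```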
lexer.cpp
162 Tokens Lexer::TokenizeString(const std::string &source_str) in TokenizeString()
176 LOG(DEBUG, ASSEMBLER) << " tokens identified: "; in TokenizeString()
178 for (const auto &f_i : lines_.back().tokens) { in TokenizeString()
187 return std::pair<std::vector<Token>, Error>(lines_.back().tokens, err_); in TokenizeString()
262 * Tokens handling: set a corresponding
263 * elements bound_left and bound_right of the array tokens
316 << "token " << curr_line_->tokens.size() + 1 << "): " in LexTokens()
321 … curr_line_->tokens.emplace_back(bound_left, bound_right, LexGetType(bound_left, bound_right), in LexTokens()
329 LOG(DEBUG, ASSEMBLER) << "all tokens identified (line " << lines_.size() << ")"; in LexTokens()
pandasm.h
32 bool Tokenize(panda::pandasm::Lexer &lexer, std::vector<std::vector<panda::pandasm::Token>> &tokens,
35 …rseProgram(panda::pandasm::Parser &parser, std::vector<std::vector<panda::pandasm::Token>> &tokens,
assembly-context.h
32 * Used to move around tokens.
45 std::vector<panda::pandasm::Token> tokens; /* token list */ member
/arkcompiler/runtime_core/compiler/optimizer/templates/
instructions.rb
18 module Tokens module
61 attr_accessor :tokens, :types accessor in Operand
73 Tokens::Types::INT8 => "DataType::INT8",
74 Tokens::Types::INT16 => "DataType::INT16",
75 Tokens::Types::INT32 => "DataType::INT32",
76 Tokens::Types::INT64 => "DataType::INT64",
77 Tokens::Types::UINT8 => "DataType::UINT8",
78 Tokens::Types::UINT16 => "DataType::UINT16",
79 Tokens::Types::UINT32 => "DataType::UINT32",
80 Tokens::Types::UINT64 => "DataType::UINT64",
[all …]
IR-instructions.md.erb
20 null_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::Ot…
21 zero_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::Ot…
22 bounds_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::…
23 negative_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens
codegen_arm64.rb
39 # Next methods are DSL tokens implementation
107 …# This method aims to handle DSL tokens that don't have methods with same name, for example, ld_8 …
/arkcompiler/runtime_core/assembler/tests/
lexer_test.cpp
29 Tokens tok = l.TokenizeString(s); in TEST()
41 Tokens tok = l.TokenizeString(s); in TEST()
51 Tokens tok = l.TokenizeString(s); in TEST()
65 Tokens tok = l.TokenizeString(s); in TEST()
75 Tokens tok = l.TokenizeString(s); in TEST()
87 Tokens tok = l.TokenizeString(s); in TEST()
98 Tokens tok = l.TokenizeString(s); in TEST()
107 Tokens tok = l.TokenizeString(s); in TEST()
116 Tokens tok = l.TokenizeString(s); in TEST()
136 Tokens tok = l.TokenizeString(s); in TEST()
[all …]
/arkcompiler/ets_runtime/ecmascript/base/
json_parser.h
41 enum class Tokens : uint8_t { enum
42 // six structural tokens
104 Tokens token = ParseToken(); in ParseJSONText()
106 case Tokens::OBJECT: in ParseJSONText()
108 case Tokens::ARRAY: in ParseJSONText()
110 case Tokens::LITERAL_TRUE: in ParseJSONText()
111 return ParseLiteral("true", Tokens::LITERAL_TRUE); in ParseJSONText()
112 case Tokens::LITERAL_FALSE: in ParseJSONText()
113 return ParseLiteral("false", Tokens::LITERAL_FALSE); in ParseJSONText()
114 case Tokens::LITERAL_NULL: in ParseJSONText()
[all …]
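json_parser.h shows the same idea on the JSON side: a `Tokens` enum of token kinds and a switch over `ParseToken()` that routes to per-kind parsers such as `ParseLiteral("true", Tokens::LITERAL_TRUE)`. A self-contained sketch of that dispatch pattern follows; the enum members beyond those visible above and the helper signatures are assumptions, not the engine's actual API.

```cpp
#include <cstdint>
#include <optional>
#include <string_view>

enum class Tokens : uint8_t { OBJECT, ARRAY, LITERAL_TRUE, LITERAL_FALSE, LITERAL_NULL };

// Stand-in for ParseLiteral(): accept the text only if it spells the expected literal.
std::optional<bool> ParseLiteralSketch(std::string_view text, Tokens expected)
{
    if (expected == Tokens::LITERAL_TRUE && text == "true") {
        return true;
    }
    if (expected == Tokens::LITERAL_FALSE && text == "false") {
        return false;
    }
    return std::nullopt;
}

// Stand-in for the ParseJSONText() switch shown above.
std::optional<bool> DispatchSketch(Tokens token, std::string_view text)
{
    switch (token) {
        case Tokens::LITERAL_TRUE:
            return ParseLiteralSketch(text, Tokens::LITERAL_TRUE);
        case Tokens::LITERAL_FALSE:
            return ParseLiteralSketch(text, Tokens::LITERAL_FALSE);
        default:
            return std::nullopt;  // OBJECT / ARRAY / LITERAL_NULL handling omitted in this sketch
    }
}
```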
/arkcompiler/runtime_core/assembler/templates/
opcode_parsing.h.erb
24 …MBLER) << "operand search started (line " << line_stric_ << "): " << context_.tokens[0].whole_line;
/arkcompiler/runtime_core/compiler/optimizer/ir/
instructions.yaml
20 For info about operand tokens see the 'legend' section
1117 "Operand tokens":
/arkcompiler/ets_frontend/ts2panda/
package-lock.json
304 "js-tokens": "^4.0.0"
1513 "js-tokens": "^3.0.2"
1541 "js-tokens": {
1543 "resolved": "https://repo.huaweicloud.com/repository/npm/js-tokens/-/js-tokens-3.0.2.tgz",
2775 "js-tokens": {
2777 "resolved": "https://repo.huaweicloud.com/repository/npm/js-tokens/-/js-tokens-4.0.0.tgz",
/arkcompiler/ets_frontend/legacy_bin/api8/
package-lock.json
278 "js-tokens": "^4.0.0"
1456 "js-tokens": "^3.0.2"
1484 "js-tokens": {
1486 "resolved": "https://registry.npmmirror.com/js-tokens/-/js-tokens-3.0.2.tgz",
2852 "js-tokens": {
2854 "resolved": "https://repo.huaweicloud.com/repository/npm/js-tokens/-/js-tokens-4.0.0.tgz",
/arkcompiler/ets_frontend/test262/
es5_tests.txt
7370 language/line-terminators/between-tokens-cr.js
7371 language/line-terminators/between-tokens-lf.js
7372 language/line-terminators/between-tokens-ls.js
7373 language/line-terminators/between-tokens-ps.js
CI_tests.txt
3109 language/line-terminators/between-tokens-ps.js
/arkcompiler/ets_frontend/legacy_bin/api8/src/
index.js
2 tokens should not be the first child of `JsxElement | JsxSelfClosingElement`");else if(q(t[i],n))r… property