| /arkcompiler/runtime_core/static_core/assembler/ |
| D | context.cpp |
      28  tokens = t;  in Make()
      34  … token = std::string_view(&*(tokens[number - 1].wholeLine.begin() + tokens[number - 1].boundLeft),  in Make()
      35  tokens[number - 1].boundRight - tokens[number - 1].boundLeft);  in Make()
      37  id = this->tokens[number - 1].type;  in Make()
     105  if (this->tokens.size() > number) {  in Next()
     106  return this->tokens[number].type;  in Next()
     109  return this->tokens[number - 1].type;  in Next()
     133  return this->tokens.size() < number + 1;  in NextMask()
     141  if (this->tokens.size() > number) {  in operator ++()
     144  id = this->tokens[number - 1].type;  in operator ++()
     [all …]
|
| D | assembly-parser.h |
     143  … context_.tokens[static_cast<int>(context_.number) + tokenShift - 1].boundLeft + shift,
     144  context_.tokens[static_cast<int>(context_.number) + tokenShift - 1].boundRight,
     145  context_.tokens[static_cast<int>(context_.number) + tokenShift - 1].wholeLine);
     152  … context_.tokens[context_.number - 1].boundLeft + static_cast<size_t>(shift),
     153  context_.tokens[context_.number - 1].boundRight,
     154  … context_.tokens[context_.number - 1].wholeLine, Error::ErrorClass::WARNING);
     160  return SourcePosition {lineStric_, context_.tokens[context_.number - 1].boundLeft};  in GetCurrentPosition()
     162  return SourcePosition {lineStric_, context_.tokens[context_.number - 1].boundRight};  in GetCurrentPosition()
     236  void ParseAsCatchall(const std::vector<Token> &tokens);
     237  …void ParseAsLanguage(const std::vector<Token> &tokens, bool &isLangParsed, bool &isFirstStatement);
     [all …]
|
| D | lexer.h |
      68  size_t boundLeft; /* right and left bounds of tokens */
      80  using Tokens = std::pair<std::vector<Token>, Error>;  variable
      85  std::vector<Token> tokens;  member
     103  * Returns a vector of tokens.
     105  PANDA_PUBLIC_API Tokens TokenizeString(const std::string &sourceStr);
|
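The lexer.h and context.cpp fragments above give the tokenizer's contract: TokenizeString returns a Tokens pair (the token vector plus an Error), and each Token stores half-open [boundLeft, boundRight) offsets into the line it came from. Below is a minimal usage sketch under those declarations; the include path and the sample input are assumptions:

```cpp
#include <iostream>
#include <string>
#include <string_view>

#include "assembler/lexer.h"  // assumed include path

int main()
{
    ark::pandasm::Lexer lexer;
    // Tokens = std::pair<std::vector<Token>, Error> (lexer.h, line 80 above).
    ark::pandasm::Tokens result = lexer.TokenizeString(".function void main() {}");

    for (const auto &t : result.first) {
        // Rebuild each token's text the way context.cpp does: a string_view
        // over [boundLeft, boundRight) of the owning source line.
        std::string_view text(&*(t.wholeLine.begin() + t.boundLeft),
                              t.boundRight - t.boundLeft);
        std::cout << text << '\n';
    }
    return 0;
}
```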
| D | pandasm.cpp |
     108  bool Tokenize(ark::pandasm::Lexer &lexer, std::vector<std::vector<ark::pandasm::Token>> &tokens,  in Tokenize() argument
     114  ark::pandasm::Tokens q = lexer.TokenizeString(s);  in Tokenize()
     119  e.lineNumber = tokens.size() + 1;  in Tokenize()
     124  tokens.push_back(q.first);  in Tokenize()
     130  …l ParseProgram(ark::pandasm::Parser &parser, std::vector<std::vector<ark::pandasm::Token>> &tokens,  in ParseProgram() argument
     134  res = parser.Parse(tokens, inputFile.GetValue());  in ParseProgram()
     259  std::vector<std::vector<ark::pandasm::Token>> tokens;  in main() local
     261  if (!Tokenize(lexer, tokens, inputfile)) {  in main()
     270  if (!ark::pandasm::ParseProgram(parser, tokens, inputFile, res)) {  in main()
|
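Taken together, the pandasm.cpp fragments sketch the assembler driver: each source line is tokenized into its own vector, the per-line vectors are accumulated, and the whole collection goes to Parser::Parse along with the input file name. A condensed sketch of that flow, assuming only the declarations above; error propagation (q.second, e.lineNumber) and the result type of Parse are simplified assumptions:

```cpp
#include <fstream>
#include <string>
#include <vector>

#include "assembler/assembly-parser.h"  // assumed include path

bool Assemble(const std::string &path)
{
    std::ifstream in(path);
    ark::pandasm::Lexer lexer;
    std::vector<std::vector<ark::pandasm::Token>> tokens;

    std::string s;
    while (std::getline(in, s)) {
        ark::pandasm::Tokens q = lexer.TokenizeString(s);  // one token vector per source line
        // On a lexer error, pandasm.cpp stamps it with the 1-based line:
        // e.lineNumber = tokens.size() + 1 (error handling elided here).
        tokens.push_back(q.first);
    }

    ark::pandasm::Parser parser;
    auto res = parser.Parse(tokens, path);
    // The fragments don't show Parse()'s exact return type; pandasm.cpp tests
    // the result for success, which this sketch assumes is bool-convertible.
    return static_cast<bool>(res);
}
```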
| D | lexer.cpp |
     160  Tokens Lexer::TokenizeString(const std::string &sourceStr)  in TokenizeString()
     174  LOG(DEBUG, ASSEMBLER) << " tokens identified: ";  in TokenizeString()
     176  for (const auto &fI : lines_.back().tokens) {  in TokenizeString()
     184  return std::pair<std::vector<Token>, Error>(lines_.back().tokens, err_);  in TokenizeString()
     264  size_t currTokenSize = currLine->tokens.size();  in IsAngleBracketInFunctionName()
     269  …return currLine->tokens[currTokenSize - FUNCTION_KEY_WORD_OFFSET].type == Token::Type::ID_FUN && i…  in IsAngleBracketInFunctionName()
     299  * Tokens handling: set a corresponding
     300  * elements bound_left and bound_right of the array tokens
     341  << "token " << currLine_->tokens.size() + 1 << "): "  in LexTokens()
     346  …currLine_->tokens.emplace_back(boundLeft, boundRight, LexGetType(boundLeft, boundRight), currLine_…  in LexTokens()
     [all …]
|
| D | assembly-parser.cpp |
     134  << "): " << context_.tokens[context_.number - 1].wholeLine;  in ParseFieldType()
     228  void Parser::ParseAsArray(const std::vector<Token> &tokens)  in ParseAsArray() argument
     230  …LOG(DEBUG, ASSEMBLER) << "started parsing of array (line " << lineStric_ << "): " << tokens[0].who…  in ParseAsArray()
     247  LOG(DEBUG, ASSEMBLER) << "array body is open, line " << lineStric_ << ": " << tokens[0].wholeLine;  in ParseAsArray()
     277  …LOG(DEBUG, ASSEMBLER) << "array body is closed, line " << lineStric_ << ": " << tokens[0].wholeLin…  in ParseAsArray()
     391  << "): " << context_.tokens[context_.number - 1].wholeLine;  in ParseArrayElementType()
     561  void Parser::ParseAsRecord(const std::vector<Token> &tokens)  in ParseAsRecord() argument
     563  …LOG(DEBUG, ASSEMBLER) << "started parsing of record (line " << lineStric_ << "): " << tokens[0].wh…  in ParseAsRecord()
     583  …LOG(DEBUG, ASSEMBLER) << "record body is open, line " << lineStric_ << ": " << tokens[0].wholeLine;  in ParseAsRecord()
     595  …G(DEBUG, ASSEMBLER) << "record body is closed, line " << lineStric_ << ": " << tokens[0].wholeLine  in ParseAsRecord()
     [all …]
|
| /arkcompiler/runtime_core/assembler/ |
| D | context.cpp |
      27  tokens = t;  in Make()
      33  …token = std::string_view(&*(tokens[number - 1].whole_line.begin() + tokens[number - 1].bound_left),  in Make()
      34  tokens[number - 1].bound_right - tokens[number - 1].bound_left);  in Make()
      36  id = this->tokens[number - 1].type;  in Make()
      99  if (this->tokens.size() > number) {  in Next()
     100  return this->tokens[number].type;  in Next()
     103  return this->tokens[number - 1].type;  in Next()
     127  return this->tokens.size() < number + 1;  in NextMask()
     135  if (this->tokens.size() > number) {  in operator ++()
     138  id = this->tokens[number - 1].type;  in operator ++()
     [all …]
|
| D | assembly-parser.h |
     117  … context_.tokens[static_cast<int>(context_.number) + token_shift - 1].bound_left + shift,
     118  … context_.tokens[static_cast<int>(context_.number) + token_shift - 1].bound_right,
     119  … context_.tokens[static_cast<int>(context_.number) + token_shift - 1].whole_line);
     126  … context_.tokens[context_.number - 1].bound_left + static_cast<size_t>(shift),
     127  context_.tokens[context_.number - 1].bound_right,
     128  … context_.tokens[context_.number - 1].whole_line, Error::ErrorClass::WARNING);
     134  return SourcePosition {line_stric_, context_.tokens[context_.number - 1].bound_left};  in GetCurrentPosition()
     136  return SourcePosition {line_stric_, context_.tokens[context_.number - 1].bound_right};  in GetCurrentPosition()
     207  void ParseAsCatchall(const std::vector<Token> &tokens);
     208  …void ParseAsLanguage(const std::vector<Token> &tokens, bool &is_lang_parsed, bool &is_first_statem…
     [all …]
|
| D | lexer.h |
      62  size_t bound_left; /* right and left bounds of tokens */
      74  using Tokens = std::pair<std::vector<Token>, Error>;  variable
      79  std::vector<Token> tokens;  member
      96  * Returns a vector of tokens.
      98  Tokens TokenizeString(const std::string &);
|
| D | pandasm.cpp |
     107  bool Tokenize(panda::pandasm::Lexer &lexer, std::vector<std::vector<panda::pandasm::Token>> &tokens,  in Tokenize() argument
     113  panda::pandasm::Tokens q = lexer.TokenizeString(s);  in Tokenize()
     118  e.line_number = tokens.size() + 1;  in Tokenize()
     123  tokens.push_back(q.first);  in Tokenize()
     129  …rseProgram(panda::pandasm::Parser &parser, std::vector<std::vector<panda::pandasm::Token>> &tokens,  in ParseProgram() argument
     133  res = parser.Parse(tokens, input_file.GetValue());  in ParseProgram()
     244  std::vector<std::vector<panda::pandasm::Token>> tokens;  in main() local
     246  if (!Tokenize(lexer, tokens, inputfile)) {  in main()
     255  if (!panda::pandasm::ParseProgram(parser, tokens, input_file, res)) {  in main()
|
| D | lexer.cpp |
     162  Tokens Lexer::TokenizeString(const std::string &source_str)  in TokenizeString()
     176  LOG(DEBUG, ASSEMBLER) << " tokens identified: ";  in TokenizeString()
     178  for (const auto &f_i : lines_.back().tokens) {  in TokenizeString()
     187  return std::pair<std::vector<Token>, Error>(lines_.back().tokens, err_);  in TokenizeString()
     262  * Tokens handling: set a corresponding
     263  * elements bound_left and bound_right of the array tokens
     316  << "token " << curr_line_->tokens.size() + 1 << "): "  in LexTokens()
     321  … curr_line_->tokens.emplace_back(bound_left, bound_right, LexGetType(bound_left, bound_right),  in LexTokens()
     329  LOG(DEBUG, ASSEMBLER) << "all tokens identified (line " << lines_.size() << ")";  in LexTokens()
|
| D | assembly-parser.cpp |
     139  << "): " << context_.tokens[context_.number - 1].whole_line;  in ParseFieldType()
     213  void Parser::ParseAsArray(const std::vector<Token> &tokens)  in ParseAsArray() argument
     215  …LOG(DEBUG, ASSEMBLER) << "started parsing of array (line " << line_stric_ << "): " << tokens[0].wh…  in ParseAsArray()
     232  …LOG(DEBUG, ASSEMBLER) << "array body is open, line " << line_stric_ << ": " << tokens[0].whole_lin…  in ParseAsArray()
     262  …LOG(DEBUG, ASSEMBLER) << "array body is closed, line " << line_stric_ << ": " << tokens[0].whole_l…  in ParseAsArray()
     368  << "): " << context_.tokens[context_.number - 1].whole_line;  in ParseArrayElementType()
     543  void Parser::ParseAsRecord(const std::vector<Token> &tokens)  in ParseAsRecord() argument
     545  …LOG(DEBUG, ASSEMBLER) << "started parsing of record (line " << line_stric_ << "): " << tokens[0].w…  in ParseAsRecord()
     565  …G(DEBUG, ASSEMBLER) << "record body is open, line " << line_stric_ << ": " << tokens[0].whole_line;  in ParseAsRecord()
     577  …DEBUG, ASSEMBLER) << "record body is closed, line " << line_stric_ << ": " << tokens[0].whole_line;  in ParseAsRecord()
     [all …]
|
| /arkcompiler/runtime_core/static_core/compiler/optimizer/templates/ |
| D | instructions.rb |
      18  module Tokens  module
      61  attr_accessor :tokens, :types  accessor in Operand
      73  Tokens::Types::INT8 => "DataType::INT8",
      74  Tokens::Types::INT16 => "DataType::INT16",
      75  Tokens::Types::INT32 => "DataType::INT32",
      76  Tokens::Types::INT64 => "DataType::INT64",
      77  Tokens::Types::UINT8 => "DataType::UINT8",
      78  Tokens::Types::UINT16 => "DataType::UINT16",
      79  Tokens::Types::UINT32 => "DataType::UINT32",
      80  Tokens::Types::UINT64 => "DataType::UINT64",
      [all …]
|
| D | IR-instructions.md.erb |
      20  null_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::Ot…
      21  zero_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::Ot…
      22  bounds_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::…
      23  negative_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens…
|
| /arkcompiler/runtime_core/compiler/optimizer/templates/ |
| D | instructions.rb |
      18  module Tokens  module
      61  attr_accessor :tokens, :types  accessor in Operand
      73  Tokens::Types::INT8 => "DataType::INT8",
      74  Tokens::Types::INT16 => "DataType::INT16",
      75  Tokens::Types::INT32 => "DataType::INT32",
      76  Tokens::Types::INT64 => "DataType::INT64",
      77  Tokens::Types::UINT8 => "DataType::UINT8",
      78  Tokens::Types::UINT16 => "DataType::UINT16",
      79  Tokens::Types::UINT32 => "DataType::UINT32",
      80  Tokens::Types::UINT64 => "DataType::UINT64",
      [all …]
|
| D | IR-instructions.md.erb |
      20  null_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::Ot…
      21  zero_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::Ot…
      22  bounds_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::…
      23  negative_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens…
|
| /arkcompiler/ets_frontend/ets2panda/lexer/scripts/ |
| D | tokens.rb |
      15  module Tokens  module
      18  def tokens  method
      19  @tokens
      27  @tokens = data.punctuators.map do |token|
      36  Tokens.wrap_data(data)
|
| /arkcompiler/ets_frontend/ets2panda/lexer/templates/ |
| D | tokenType.h.erb |
      21  % allTokens = Tokens::tokens + Keywords::keywords
      33  % raise "Invalid flag #{flag}" unless Tokens::flags.has_key? flag.to_sym
      39  FIRST_PUNCTUATOR = <%= Tokens::tokens[0].token %>,
|
| D | token.inl.erb |
      22  % allTokens = Tokens::tokens + Keywords::keywords
      46  % genSwitch = lambda do |var, tokens, flag|
      48  % tokens.select { |token| token.flags&.include? flag }.each do |token|
      79  % genSwitch['type', Tokens::tokens, 'binary_lvalue']
|
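The genSwitch lambda in token.inl.erb filters the token list by a flag and emits one case label per match. The expansion of genSwitch['type', Tokens::tokens, 'binary_lvalue'] plausibly has the shape below; the enum and its enumerators are illustrative stand-ins, not taken from the template:

```cpp
// Placeholder enum standing in for the generated lexer TokenType.
enum class TokenType { PUNCTUATOR_SUBSTITUTION, PUNCTUATOR_PLUS_EQUAL, EOS };

// Roughly what the generated switch could look like: one case per token
// whose flags include 'binary_lvalue', falling through to a shared return.
bool IsBinaryLvalue(TokenType type)
{
    switch (type) {
        case TokenType::PUNCTUATOR_SUBSTITUTION:  // '=' (assumed flag carrier)
        case TokenType::PUNCTUATOR_PLUS_EQUAL:    // '+=' (assumed flag carrier)
            return true;
        default:
            return false;
    }
}
```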
| /arkcompiler/runtime_core/static_core/assembler/tests/ |
| D | lexer_test.cpp |
      32  Tokens tok = l.TokenizeString(s);  in TEST()
      44  Tokens tok = l.TokenizeString(s);  in TEST()
      54  Tokens tok = l.TokenizeString(s);  in TEST()
      68  Tokens tok = l.TokenizeString(s);  in TEST()
      78  Tokens tok = l.TokenizeString(s);  in TEST()
      90  Tokens tok = l.TokenizeString(s);  in TEST()
     101  Tokens tok = l.TokenizeString(s);  in TEST()
     110  Tokens tok = l.TokenizeString(s);  in TEST()
     119  Tokens tok = l.TokenizeString(s);  in TEST()
     139  Tokens tok = l.TokenizeString(s);  in TEST()
     [all …]
|
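Every one of these tests follows the same pattern: construct a Lexer, feed a string to TokenizeString, and assert on the resulting token vector. A minimal test in that style, assuming the static_core declarations above; the suite/case names, the input, and the invariant checked are assumptions:

```cpp
#include <string>

#include <gtest/gtest.h>

#include "assembler/lexer.h"  // assumed include path

TEST(lexertests, tokenize_smoke)  // hypothetical test name
{
    ark::pandasm::Lexer l;
    std::string s = ".function void f() {}";
    ark::pandasm::Tokens tok = l.TokenizeString(s);

    ASSERT_FALSE(tok.first.empty());
    for (const auto &t : tok.first) {
        // Bounds are offsets into the source line, so left must not exceed right.
        EXPECT_LE(t.boundLeft, t.boundRight);
    }
}
```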
| /arkcompiler/runtime_core/assembler/tests/ |
| D | lexer_test.cpp |
      29  Tokens tok = l.TokenizeString(s);  in TEST()
      41  Tokens tok = l.TokenizeString(s);  in TEST()
      51  Tokens tok = l.TokenizeString(s);  in TEST()
      65  Tokens tok = l.TokenizeString(s);  in TEST()
      75  Tokens tok = l.TokenizeString(s);  in TEST()
      87  Tokens tok = l.TokenizeString(s);  in TEST()
      98  Tokens tok = l.TokenizeString(s);  in TEST()
     107  Tokens tok = l.TokenizeString(s);  in TEST()
     116  Tokens tok = l.TokenizeString(s);  in TEST()
     136  Tokens tok = l.TokenizeString(s);  in TEST()
     [all …]
|
| D | assembler_lexer_test.cpp |
      40  Tokens tok = l.TokenizeString(s);
      58  Tokens tok = l.TokenizeString(s);
      74  Tokens tok = l.TokenizeString(s);
      94  Tokens tok = l.TokenizeString(s);
     110  Tokens tok = l.TokenizeString(s);
     128  Tokens tok = l.TokenizeString(s);
     145  Tokens tok = l.TokenizeString(s);
     160  Tokens tok = l.TokenizeString(s);
     175  Tokens tok = l.TokenizeString(s);
     201  Tokens tok = l.TokenizeString(s);
     [all …]
|
| /arkcompiler/runtime_core/panda_guard/util/ |
| D | string_util.cpp |
      33  std::vector<std::string> tokens;  in Split() local
      38  tokens.push_back(str.substr(start, pos - start));  in Split()
      44  return tokens;  in Split()
      49  std::vector<std::string> tokens;  in StrictSplit() local
      59  tokens.push_back(str.substr(start, end - start));  in StrictSplit()
      61  tokens.emplace_back("");  in StrictSplit()
      66  return tokens;  in StrictSplit()
|
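The fragments suggest Split collects the substrings between separator occurrences, while StrictSplit additionally emits an empty string where nothing sits between two separators (the emplace_back("") on line 61). A plausible reconstruction of Split under that reading; only lines 33, 38, and 44 are visible above, so the loop structure and parameter names are assumptions:

```cpp
#include <string>
#include <vector>

std::vector<std::string> Split(const std::string &str, const std::string &sep)
{
    std::vector<std::string> tokens;
    size_t start = 0;
    size_t pos;
    while ((pos = str.find(sep, start)) != std::string::npos) {
        if (pos > start) {  // drop empty pieces; StrictSplit would keep them
            tokens.push_back(str.substr(start, pos - start));
        }
        start = pos + sep.size();
    }
    if (start < str.size()) {
        tokens.push_back(str.substr(start));  // trailing piece after the last separator
    }
    return tokens;
}
```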
| /arkcompiler/runtime_core/static_core/plugins/ets/tests/ets_func_tests/escompat/ |
| D | ProcessTest.ets |
      51  let tokens: String[] = output.split(separator);
      52  return tokens;
      57  let tokens: String[] = output.split(separator);
      58  return tokens;
      96  let tokens = output[i].replace("Uid:", "").split("\t");
      97  result.set("uid", parseInt(tokens[1]));
      98  result.set("euid", parseInt(tokens[2]));
     100  let tokens = output[i].replace("Gid:", "").split("\t");
     101  result.set("gid", parseInt(tokens[1]));
     102  result.set("egid", parseInt(tokens[2]));
     [all …]
|
| /arkcompiler/ets_frontend/ets2panda/lsp/include/formatting/ |
| D | rule.h |
      71  explicit TokenRange(std::vector<ir::AstNodeType> &tokens, bool isSpecific)  in TokenRange()
      72  : tokens_(std::move(tokens)), isSpecific_(isSpecific)  in TokenRange()
|
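One detail worth flagging in TokenRange: the constructor takes the vector by non-const lvalue reference and then moves from it, so the caller's vector is left in a moved-from (typically empty) state. A self-contained usage sketch under that reading; the ir::AstNodeType stand-in and its enumerator names are placeholders:

```cpp
#include <utility>
#include <vector>

// Stand-in for the real ets2panda ir::AstNodeType enum.
namespace ir { enum class AstNodeType { IDENTIFIER, RETURN_STATEMENT }; }

class TokenRange {
public:
    // Mirrors lines 71-72 above: binds a mutable vector, then moves from it.
    explicit TokenRange(std::vector<ir::AstNodeType> &tokens, bool isSpecific)
        : tokens_(std::move(tokens)), isSpecific_(isSpecific)
    {
    }

private:
    std::vector<ir::AstNodeType> tokens_;
    bool isSpecific_;
};

int main()
{
    std::vector<ir::AstNodeType> kinds {ir::AstNodeType::IDENTIFIER};
    TokenRange range(kinds, /*isSpecific=*/true);
    // 'kinds' is now moved-from; reassign before reusing it.
    return 0;
}
```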