
Searched full:tokens (Results 1 – 25 of 50) sorted by relevance

/arkcompiler/runtime_core/assembler/
context.cpp
27 tokens = t; in Make()
33 …token = std::string_view(&*(tokens[number - 1].whole_line.begin() + tokens[number - 1].bound_left), in Make()
34 tokens[number - 1].bound_right - tokens[number - 1].bound_left); in Make()
36 id = this->tokens[number - 1].type; in Make()
99 if (this->tokens.size() > number) { in Next()
100 return this->tokens[number].type; in Next()
103 return this->tokens[number - 1].type; in Next()
127 return this->tokens.size() < number + 1; in NextMask()
135 if (this->tokens.size() > number) { in operator ++()
138 id = this->tokens[number - 1].type; in operator ++()
[all …]
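
The Make() lines above rebuild a token's text as a std::string_view over the original source line, using the stored bounds instead of a copy. A minimal sketch of that pattern, with a hypothetical TokenSketch standing in for the assembler's Token:

    #include <cstddef>
    #include <string>
    #include <string_view>

    // Hypothetical mirror of the assembler's Token: the token does not own its
    // text; it stores [bound_left, bound_right) offsets into the source line.
    struct TokenSketch {
        size_t bound_left;
        size_t bound_right;
        std::string whole_line;
    };

    // Rebuild the token's text the way Context::Make() does above: a view into
    // whole_line starting at bound_left, of length bound_right - bound_left.
    std::string_view TokenText(const TokenSketch &t)
    {
        return std::string_view(t.whole_line.data() + t.bound_left,
                                t.bound_right - t.bound_left);
    }
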
assembly-parser.h
111 … context_.tokens[static_cast<int>(context_.number) + token_shift - 1].bound_left + shift,
112 … context_.tokens[static_cast<int>(context_.number) + token_shift - 1].bound_right,
113 … context_.tokens[static_cast<int>(context_.number) + token_shift - 1].whole_line);
120 … context_.tokens[context_.number - 1].bound_left + static_cast<size_t>(shift),
121 context_.tokens[context_.number - 1].bound_right,
122 … context_.tokens[context_.number - 1].whole_line, Error::ErrorClass::WARNING);
128 return SourcePosition {line_stric_, context_.tokens[context_.number - 1].bound_left}; in GetCurrentPosition()
130 return SourcePosition {line_stric_, context_.tokens[context_.number - 1].bound_right}; in GetCurrentPosition()
201 void ParseAsCatchall(const std::vector<Token> &tokens);
202 …void ParseAsLanguage(const std::vector<Token> &tokens, bool &is_lang_parsed, bool &is_first_statem…
[all …]
lexer.h
62 size_t bound_left; /* right and left bounds of tokens */
74 using Tokens = std::pair<std::vector<Token>, Error>; variable
79 std::vector<Token> tokens; member
96 * Returns a vector of tokens.
98 Tokens TokenizeString(const std::string &);
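
lexer.h above declares Tokens as std::pair<std::vector<Token>, Error>, so every TokenizeString() call hands back both the token list and a per-line error. A minimal usage sketch under that declaration:

    #include <string>
    #include <vector>
    #include "lexer.h"   // panda::pandasm::Lexer, Token, Tokens, as declared above

    // Tokenize one source line and read both halves of the returned pair.
    // The Error half should be checked before trusting the vector.
    void TokenizeOneLine(const std::string &line)
    {
        panda::pandasm::Lexer lexer;
        panda::pandasm::Tokens result = lexer.TokenizeString(line);

        const std::vector<panda::pandasm::Token> &toks = result.first;
        // result.second is the Error the lexer recorded for this line (if any).
        (void)toks;
    }
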
pandasm.cpp
107 bool Tokenize(panda::pandasm::Lexer &lexer, std::vector<std::vector<panda::pandasm::Token>> &tokens, in Tokenize() argument
113 panda::pandasm::Tokens q = lexer.TokenizeString(s); in Tokenize()
118 e.line_number = tokens.size() + 1; in Tokenize()
123 tokens.push_back(q.first); in Tokenize()
129 …rseProgram(panda::pandasm::Parser &parser, std::vector<std::vector<panda::pandasm::Token>> &tokens, in ParseProgram() argument
133 res = parser.Parse(tokens, input_file.GetValue()); in ParseProgram()
244 std::vector<std::vector<panda::pandasm::Token>> tokens; in main() local
246 if (!Tokenize(lexer, tokens, inputfile)) { in main()
255 if (!panda::pandasm::ParseProgram(parser, tokens, input_file, res)) { in main()
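
pandasm.cpp above shows the driver shape: main() tokenizes each input line into its own vector, then hands the whole vector-of-vectors to Parser::Parse. A condensed sketch of that flow; error handling and the option objects (input_file.GetValue()) are elided, and the handling of the parse result is an assumption:

    #include <fstream>
    #include <string>
    #include <vector>
    #include "assembly-parser.h"   // panda::pandasm::Parser; pulls in Lexer/Token

    // Sketch of the tokenize-then-parse pipeline from pandasm.cpp above:
    // one inner token vector per source line, then a single Parse() call.
    bool AssembleSketch(const std::string &path)
    {
        std::ifstream in(path);
        std::vector<std::vector<panda::pandasm::Token>> tokens;
        panda::pandasm::Lexer lexer;

        for (std::string line; std::getline(in, line);) {
            panda::pandasm::Tokens q = lexer.TokenizeString(line);
            tokens.push_back(q.first);   // keep this line's tokens
        }

        panda::pandasm::Parser parser;
        auto res = parser.Parse(tokens, path);   // signature per the snippet above
        (void)res;   // res carries the parsed program or the parse error
        return true;
    }
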
assembly-parser.cpp
139 << "): " << context_.tokens[context_.number - 1].whole_line; in ParseFieldType()
213 void Parser::ParseAsArray(const std::vector<Token> &tokens) in ParseAsArray() argument
215 …LOG(DEBUG, ASSEMBLER) << "started parsing of array (line " << line_stric_ << "): " << tokens[0].wh… in ParseAsArray()
232 …LOG(DEBUG, ASSEMBLER) << "array body is open, line " << line_stric_ << ": " << tokens[0].whole_lin… in ParseAsArray()
262 …LOG(DEBUG, ASSEMBLER) << "array body is closed, line " << line_stric_ << ": " << tokens[0].whole_l… in ParseAsArray()
368 << "): " << context_.tokens[context_.number - 1].whole_line; in ParseArrayElementType()
528 void Parser::ParseAsRecord(const std::vector<Token> &tokens) in ParseAsRecord() argument
530 …LOG(DEBUG, ASSEMBLER) << "started parsing of record (line " << line_stric_ << "): " << tokens[0].w… in ParseAsRecord()
550 …G(DEBUG, ASSEMBLER) << "record body is open, line " << line_stric_ << ": " << tokens[0].whole_line; in ParseAsRecord()
562 …DEBUG, ASSEMBLER) << "record body is closed, line " << line_stric_ << ": " << tokens[0].whole_line; in ParseAsRecord()
[all …]
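
The ParseAs* handlers above (ParseAsArray, ParseAsRecord, plus the ParseAsCatchall/ParseAsLanguage declarations earlier) all take the current line's token vector and key off its first token. A hypothetical dispatcher in that shape; the ".record"/".array" keyword comparisons are assumptions for illustration:

    #include <string_view>
    #include <vector>
    #include "lexer.h"   // panda::pandasm::Token

    // Hypothetical dispatcher in the style of the ParseAs* handlers above.
    void ParseLineSketch(const std::vector<panda::pandasm::Token> &tokens)
    {
        if (tokens.empty()) {
            return;
        }
        const auto &t = tokens[0];
        std::string_view first(t.whole_line.data() + t.bound_left,
                               t.bound_right - t.bound_left);
        if (first == ".record") {
            // ParseAsRecord(tokens);  // as in assembly-parser.cpp above
        } else if (first == ".array") {
            // ParseAsArray(tokens);
        }
    }
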
lexer.cpp
162 Tokens Lexer::TokenizeString(const std::string &source_str) in TokenizeString()
176 LOG(DEBUG, ASSEMBLER) << " tokens identified: "; in TokenizeString()
178 for (const auto &f_i : lines_.back().tokens) { in TokenizeString()
187 return std::pair<std::vector<Token>, Error>(lines_.back().tokens, err_); in TokenizeString()
262 * Tokens handling: set a corresponding
263 * elements bound_left and bound_right of the array tokens
316 << "token " << curr_line_->tokens.size() + 1 << "): " in LexTokens()
321 … curr_line_->tokens.emplace_back(bound_left, bound_right, LexGetType(bound_left, bound_right), in LexTokens()
329 LOG(DEBUG, ASSEMBLER) << "all tokens identified (line " << lines_.size() << ")"; in LexTokens()
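
LexTokens() above stores each token as offsets plus a type via emplace_back(bound_left, bound_right, ...), never copying the text. A stripped-down scanner in the same spirit; whitespace-only delimiting is an assumption, since the real lexer also handles comments and operators:

    #include <cctype>
    #include <cstddef>
    #include <string>
    #include <vector>

    struct BoundsSketch {
        size_t bound_left;    // first character of the token
        size_t bound_right;   // one past the last character
    };

    // Walk the line and record only [bound_left, bound_right) per token,
    // analogous to the bound-setting loop in LexTokens() above.
    std::vector<BoundsSketch> ScanBounds(const std::string &line)
    {
        std::vector<BoundsSketch> out;
        size_t i = 0;
        while (i < line.size()) {
            while (i < line.size() && std::isspace(static_cast<unsigned char>(line[i])) != 0) {
                ++i;   // skip the gap between tokens
            }
            if (i == line.size()) {
                break;
            }
            const size_t left = i;
            while (i < line.size() && std::isspace(static_cast<unsigned char>(line[i])) == 0) {
                ++i;
            }
            out.push_back({left, i});   // like tokens.emplace_back(bound_left, bound_right, ...)
        }
        return out;
    }
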
pandasm.h
32 bool Tokenize(panda::pandasm::Lexer &lexer, std::vector<std::vector<panda::pandasm::Token>> &tokens,
35 …rseProgram(panda::pandasm::Parser &parser, std::vector<std::vector<panda::pandasm::Token>> &tokens,
assembly-context.h
32 * Used to move around tokens.
45 std::vector<panda::pandasm::Token> tokens; /* token list */ member
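
assembly-context.h describes Context as the cursor "used to move around tokens": it owns the line's token list plus a 1-based position, which is why every snippet above indexes tokens[number - 1]. A hypothetical reduction of that cursor, with Next() clamping at end of line as context.cpp does:

    #include <cstddef>
    #include <vector>
    #include "lexer.h"   // panda::pandasm::Token

    // Hypothetical reduction of Context: a 1-based cursor over one line's tokens.
    struct ContextSketch {
        std::vector<panda::pandasm::Token> tokens;   /* token list */
        size_t number = 1;   // current token is tokens[number - 1]

        // Peek at the next token's type, falling back to the current one at the
        // end of the line, mirroring Context::Next() in context.cpp above.
        auto Next() const
        {
            if (tokens.size() > number) {
                return tokens[number].type;
            }
            return tokens[number - 1].type;
        }
    };
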
/arkcompiler/runtime_core/static_core/assembler/
context.cpp
28 tokens = t; in Make()
34 … token = std::string_view(&*(tokens[number - 1].wholeLine.begin() + tokens[number - 1].boundLeft), in Make()
35 tokens[number - 1].boundRight - tokens[number - 1].boundLeft); in Make()
37 id = this->tokens[number - 1].type; in Make()
105 if (this->tokens.size() > number) { in Next()
106 return this->tokens[number].type; in Next()
109 return this->tokens[number - 1].type; in Next()
133 return this->tokens.size() < number + 1; in NextMask()
141 if (this->tokens.size() > number) { in operator ++()
144 id = this->tokens[number - 1].type; in operator ++()
[all …]
assembly-parser.h
112 … context_.tokens[static_cast<int>(context_.number) + tokenShift - 1].boundLeft + shift,
113 context_.tokens[static_cast<int>(context_.number) + tokenShift - 1].boundRight,
114 context_.tokens[static_cast<int>(context_.number) + tokenShift - 1].wholeLine);
121 … context_.tokens[context_.number - 1].boundLeft + static_cast<size_t>(shift),
122 context_.tokens[context_.number - 1].boundRight,
123 … context_.tokens[context_.number - 1].wholeLine, Error::ErrorClass::WARNING);
129 return SourcePosition {lineStric_, context_.tokens[context_.number - 1].boundLeft}; in GetCurrentPosition()
131 return SourcePosition {lineStric_, context_.tokens[context_.number - 1].boundRight}; in GetCurrentPosition()
204 void ParseAsCatchall(const std::vector<Token> &tokens);
205 …void ParseAsLanguage(const std::vector<Token> &tokens, bool &isLangParsed, bool &isFirstStatement);
[all …]
lexer.h
68 size_t boundLeft; /* right and left bounds of tokens */
80 using Tokens = std::pair<std::vector<Token>, Error>; variable
85 std::vector<Token> tokens; member
103 * Returns a vector of tokens.
105 PANDA_PUBLIC_API Tokens TokenizeString(const std::string &sourceStr);
pandasm.cpp
108 bool Tokenize(ark::pandasm::Lexer &lexer, std::vector<std::vector<ark::pandasm::Token>> &tokens, in Tokenize() argument
114 ark::pandasm::Tokens q = lexer.TokenizeString(s); in Tokenize()
119 e.lineNumber = tokens.size() + 1; in Tokenize()
124 tokens.push_back(q.first); in Tokenize()
130 …l ParseProgram(ark::pandasm::Parser &parser, std::vector<std::vector<ark::pandasm::Token>> &tokens, in ParseProgram() argument
134 res = parser.Parse(tokens, inputFile.GetValue()); in ParseProgram()
259 std::vector<std::vector<ark::pandasm::Token>> tokens; in main() local
261 if (!Tokenize(lexer, tokens, inputfile)) { in main()
270 if (!ark::pandasm::ParseProgram(parser, tokens, inputFile, res)) { in main()
assembly-parser.cpp
147 << "): " << context_.tokens[context_.number - 1].wholeLine; in ParseFieldType()
217 void Parser::ParseAsArray(const std::vector<Token> &tokens) in ParseAsArray() argument
219 …LOG(DEBUG, ASSEMBLER) << "started parsing of array (line " << lineStric_ << "): " << tokens[0].who… in ParseAsArray()
236 … LOG(DEBUG, ASSEMBLER) << "array body is open, line " << lineStric_ << ": " << tokens[0].wholeLine; in ParseAsArray()
266 …LOG(DEBUG, ASSEMBLER) << "array body is closed, line " << lineStric_ << ": " << tokens[0].wholeLin… in ParseAsArray()
380 << "): " << context_.tokens[context_.number - 1].wholeLine; in ParseArrayElementType()
550 void Parser::ParseAsRecord(const std::vector<Token> &tokens) in ParseAsRecord() argument
552 …LOG(DEBUG, ASSEMBLER) << "started parsing of record (line " << lineStric_ << "): " << tokens[0].wh… in ParseAsRecord()
572 …LOG(DEBUG, ASSEMBLER) << "record body is open, line " << lineStric_ << ": " << tokens[0].wholeLine; in ParseAsRecord()
584 …G(DEBUG, ASSEMBLER) << "record body is closed, line " << lineStric_ << ": " << tokens[0].wholeLine; in ParseAsRecord()
[all …]
lexer.cpp
160 Tokens Lexer::TokenizeString(const std::string &sourceStr) in TokenizeString()
174 LOG(DEBUG, ASSEMBLER) << " tokens identified: "; in TokenizeString()
176 for (const auto &fI : lines_.back().tokens) { in TokenizeString()
184 return std::pair<std::vector<Token>, Error>(lines_.back().tokens, err_); in TokenizeString()
257 * Tokens handling: set a corresponding
258 * elements bound_left and bound_right of the array tokens
307 << "token " << currLine_->tokens.size() + 1 << "): " in LexTokens()
312 …currLine_->tokens.emplace_back(boundLeft, boundRight, LexGetType(boundLeft, boundRight), currLine_… in LexTokens()
319 LOG(DEBUG, ASSEMBLER) << "all tokens identified (line " << lines_.size() << ")"; in LexTokens()
pandasm.h
33 bool Tokenize(ark::pandasm::Lexer &lexer, std::vector<std::vector<ark::pandasm::Token>> &tokens,
36 …l ParseProgram(ark::pandasm::Parser &parser, std::vector<std::vector<ark::pandasm::Token>> &tokens,
/arkcompiler/runtime_core/static_core/compiler/optimizer/templates/
instructions.rb
18 module Tokens module
61 attr_accessor :tokens, :types accessor in Operand
73 Tokens::Types::INT8 => "DataType::INT8",
74 Tokens::Types::INT16 => "DataType::INT16",
75 Tokens::Types::INT32 => "DataType::INT32",
76 Tokens::Types::INT64 => "DataType::INT64",
77 Tokens::Types::UINT8 => "DataType::UINT8",
78 Tokens::Types::UINT16 => "DataType::UINT16",
79 Tokens::Types::UINT32 => "DataType::UINT32",
80 Tokens::Types::UINT64 => "DataType::UINT64",
[all …]
IR-instructions.md.erb
20 null_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::Ot…
21 zero_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::Ot…
22 bounds_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::…
23 negative_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens
/arkcompiler/runtime_core/compiler/optimizer/templates/
instructions.rb
18 module Tokens module
61 attr_accessor :tokens, :types accessor in Operand
73 Tokens::Types::INT8 => "DataType::INT8",
74 Tokens::Types::INT16 => "DataType::INT16",
75 Tokens::Types::INT32 => "DataType::INT32",
76 Tokens::Types::INT64 => "DataType::INT64",
77 Tokens::Types::UINT8 => "DataType::UINT8",
78 Tokens::Types::UINT16 => "DataType::UINT16",
79 Tokens::Types::UINT32 => "DataType::UINT32",
80 Tokens::Types::UINT64 => "DataType::UINT64",
[all …]
IR-instructions.md.erb
20 null_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::Ot…
21 zero_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::Ot…
22 bounds_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens::…
23 negative_check_users = IR::instructions.select { |x| x.operands.any? { |o| o.tokens.include? Tokens
/arkcompiler/runtime_core/static_core/assembler/tests/
lexer_test.cpp
32 Tokens tok = l.TokenizeString(s); in TEST()
44 Tokens tok = l.TokenizeString(s); in TEST()
54 Tokens tok = l.TokenizeString(s); in TEST()
68 Tokens tok = l.TokenizeString(s); in TEST()
78 Tokens tok = l.TokenizeString(s); in TEST()
90 Tokens tok = l.TokenizeString(s); in TEST()
101 Tokens tok = l.TokenizeString(s); in TEST()
110 Tokens tok = l.TokenizeString(s); in TEST()
119 Tokens tok = l.TokenizeString(s); in TEST()
139 Tokens tok = l.TokenizeString(s); in TEST()
[all …]
/arkcompiler/runtime_core/assembler/tests/
lexer_test.cpp
29 Tokens tok = l.TokenizeString(s); in TEST()
41 Tokens tok = l.TokenizeString(s); in TEST()
51 Tokens tok = l.TokenizeString(s); in TEST()
65 Tokens tok = l.TokenizeString(s); in TEST()
75 Tokens tok = l.TokenizeString(s); in TEST()
87 Tokens tok = l.TokenizeString(s); in TEST()
98 Tokens tok = l.TokenizeString(s); in TEST()
107 Tokens tok = l.TokenizeString(s); in TEST()
116 Tokens tok = l.TokenizeString(s); in TEST()
136 Tokens tok = l.TokenizeString(s); in TEST()
[all …]
assembler_lexer_test.cpp
40 Tokens tok = l.TokenizeString(s);
58 Tokens tok = l.TokenizeString(s);
74 Tokens tok = l.TokenizeString(s);
94 Tokens tok = l.TokenizeString(s);
110 Tokens tok = l.TokenizeString(s);
128 Tokens tok = l.TokenizeString(s);
145 Tokens tok = l.TokenizeString(s);
160 Tokens tok = l.TokenizeString(s);
175 Tokens tok = l.TokenizeString(s);
201 Tokens tok = l.TokenizeString(s);
[all …]
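
Both test suites above repeat one shape: construct a Lexer, call TokenizeString, then assert on the returned pair. An illustrative gtest-style case in that shape; the suite name, input string, and expectation are assumptions, not taken from the tests:

    #include <string>
    #include <gtest/gtest.h>
    #include "lexer.h"   // panda::pandasm::Lexer, Tokens

    // Illustrative only: input and expected outcome are assumed for the sketch.
    TEST(lexertests, tokenize_sketch)
    {
        panda::pandasm::Lexer l;
        std::string s = "mov v0, v1";
        panda::pandasm::Tokens tok = l.TokenizeString(s);

        EXPECT_FALSE(tok.first.empty());   // some tokens were produced
    }
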
/arkcompiler/runtime_core/panda_guard/util/
string_util.cpp
33 std::vector<std::string> tokens; in Split() local
38 tokens.push_back(str.substr(start, pos - start)); in Split()
44 return tokens; in Split()
49 std::vector<std::string> tokens; in StrictSplit() local
59 tokens.push_back(str.substr(start, end - start)); in StrictSplit()
61 tokens.emplace_back(""); in StrictSplit()
66 return tokens; in StrictSplit()
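
string_util's Split above collects substr slices between delimiter hits, while StrictSplit also emits empty tokens (the emplace_back("") at line 61). A self-contained sketch of the Split variant; the exact signature is not shown in the snippet, so this one is assumed:

    #include <string>
    #include <vector>

    // Sketch of a panda_guard-style Split: non-empty slices between delimiters.
    std::vector<std::string> Split(const std::string &str, const std::string &delim)
    {
        std::vector<std::string> tokens;
        size_t start = 0;
        size_t pos = 0;
        while ((pos = str.find(delim, start)) != std::string::npos) {
            if (pos > start) {
                tokens.push_back(str.substr(start, pos - start));   // as in the snippet
            }
            start = pos + delim.size();
        }
        if (start < str.size()) {
            tokens.push_back(str.substr(start));   // trailing token, if any
        }
        return tokens;
    }
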
/arkcompiler/runtime_core/static_core/plugins/ets/tests/ets_func_tests/escompat/
ProcessTest.sts
50 let tokens: String[] = output.split(separator);
51 return tokens;
56 let tokens: String[] = output.split(separator);
57 return tokens;
95 let tokens = output[i].replace("Uid:", "").split("\t");
96 result.set("uid", parseInt(tokens[1]));
97 result.set("euid", parseInt(tokens[2]));
99 let tokens = output[i].replace("Gid:", "").split("\t");
100 result.set("gid", parseInt(tokens[1]));
101 result.set("egid", parseInt(tokens[2]));
[all …]
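
ProcessTest.sts above extracts uid/euid (and gid/egid) by stripping the "Uid:" prefix from a /proc-status-style line and splitting on tabs, so tokens[1] and tokens[2] are the real and effective IDs. A C++ analogue of that parse; the helper names are hypothetical:

    #include <cstdlib>
    #include <string>
    #include <vector>

    // Split on tab, keeping empty fields, as the .sts split("\t") above does.
    std::vector<std::string> SplitTabs(const std::string &s)
    {
        std::vector<std::string> tokens;
        size_t start = 0;
        size_t pos = 0;
        while ((pos = s.find('\t', start)) != std::string::npos) {
            tokens.push_back(s.substr(start, pos - start));
            start = pos + 1;
        }
        tokens.push_back(s.substr(start));
        return tokens;
    }

    // Assumes the caller has already matched a line starting with "Uid:".
    // Field positions follow the snippet: tokens[1] = uid, tokens[2] = euid.
    void ParseUidLine(const std::string &line, long &uid, long &euid)
    {
        const std::string rest = line.substr(std::string("Uid:").size());
        const std::vector<std::string> tokens = SplitTabs(rest);
        if (tokens.size() > 2) {
            uid = std::strtol(tokens[1].c_str(), nullptr, 10);
            euid = std::strtol(tokens[2].c_str(), nullptr, 10);
        }
    }
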
/arkcompiler/runtime_core/static_core/plugins/ets/doc/spec/
1_intro.rst
180 the structure of the elementary language parts called *tokens*. All tokens are
181 defined in :ref:`Lexical Elements`. The set of tokens (identifiers, keywords,
201 The tokens defined by the lexical grammar are terminal symbols of syntactic
205 how sequences of tokens can form syntactically correct programs.
347 that are not tokens in the alphabet of that language, i.e., operator
368 tokens can form syntactically correct programs.
372 operator and punctuator, or literal. Tokens are lexical input elements
490 -- one of lexical input elements that separate tokens from one another