/**
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Autogenerated file -- DO NOT EDIT!

#ifndef ES2PANDA_LEXER_KEYWORDS_H
#define ES2PANDA_LEXER_KEYWORDS_H

#include "lexer/keywordsBase.h"
#include "utils/span.h"

// NOLINTBEGIN(readability-identifier-naming)

namespace panda::es2panda::lexer {

% token_type_prefix = 'TokenType::'
% Keywords::extensions.each do |extension_name, desc|
% class_name = extension_name.upcase + 'Keywords'

class <%= class_name %> : public Keywords {
public:
    explicit <%= class_name %>(Lexer* lexer, lexer::NextTokenFlags flags) : Keywords(lexer, flags) {}
    NO_COPY_SEMANTIC(<%= class_name %>);
    NO_MOVE_SEMANTIC(<%= class_name %>);
    ~<%= class_name %>() = default;

    // Returns the table of keywords that begin with the given code point,
    // or an empty span when no keyword starts with it.
    Span<const KeywordString> KeywordMap(char32_t cp) const override
    {
        switch (cp) {
% desc['keyword_starts'].each do |cp|
            case '<%= cp %>':
                return Span(KEYWORDS_<%= cp %>);
% end
            default:
                return Span<const KeywordString>();
        }
    }

    // Dispatches to the scan routine generated for the first letter of the
    // current word; any other letter is scanned as a plain identifier.
    void ScanKeyword(char32_t cp) override
    {
        ASSERT(cp >= 'a' && cp <= 'z');

        switch (cp) {
% desc['all_word_starts'].each do |cp|
            case '<%= cp %>':
                Scan_<%= cp %>();
                break;
% end
            default:
                Util().ScanIdContinue();
                break;
        }
    }

    // A keyword containing escape sequences is resolved by its custom
    // handler, demoted to an identifier, or rejected.
    void HandlePotentialEscapedKeyword(const KeywordString& kws) const override
    {
        switch (kws.GetTokenType()) {
% desc['all_words'].select { |kw| kw&.custom_handler&.include? extension_name }.each do |kw|
            case <%= token_type_prefix + kw.token %>:
                Util().SetKeyword(Handle_<%= kw.name %>(Util(), "<%= kw.name %>", <%= token_type_prefix + kw.token %>));
                return;
% end
            default:
                if (Util().KeywordToIdent()) {
                    return;
                }
        }

        Util().ThrowEscapedKeyword();
    }

% desc['all_words'].select { |kw| kw&.custom_handler&.include? extension_name }.each do |kw|
    // NOLINTNEXTLINE(readability-identifier-naming)
    static KeywordString Handle_<%= kw.name %>(const KeywordsUtil& util, std::string_view src, TokenType tokenType);
% end

private:
    // Keyword tables grouped by leading character.
% desc['keyword_starts'].each do |cp|
% kws = desc['keywords'].select { |kw| kw.name[0] == cp }
    static constexpr std::array<KeywordString, <%= kws.size %>> KEYWORDS_<%= cp %> = {{
% kws.each do |kw|
        {"<%= kw.name %>", <%= token_type_prefix + kw.token %>},
% end
    }};
% end

    // Per-prefix scan routines generated from the keyword prefix tree.
% desc['tree'].each do |key, prefixes|
    inline void Scan_<%= key %>()
    {
        switch (Util().Iterator().Peek()) {
% prefixes.select { |p| !p.nil? }.each do |prefix|
            case '<%= prefix %>': {
                Util().Iterator().Forward(1);
                Scan_<%= key %><%= prefix %>();
                return;
            }
% end
% if prefixes.include?(nil)
            default: {
                if (!KeywordsUtil::IsIdentifierPart(Util().Iterator().PeekCp())) {
% kw_desc = desc['all_words'].find { |x| x.name == key }
% if kw_desc&.custom_handler&.include? extension_name
                    SetKeyword<Handle_<%= key %>>({"<%= key %>", <%= token_type_prefix + kw_desc.token %>});
% else
% token_type = kw_desc.token
% if kw_desc&.keyword_like&.include? extension_name
% token_type = 'LITERAL_IDENT'
% end
                    SetKeyword({"<%= key %>", <%= token_type_prefix + token_type %>, <%= token_type_prefix + kw_desc.token %>});
% end
                    return;
                }

                break;
            }
        }
% else
            default: {
                break;
            }
        }
% end

% if desc['keyword_starts'].include?(key[0])
        Util().ScanIdContinueMaybeKeyword(this, Span(KEYWORDS_<%= key[0] %>));
% else
        Util().ScanIdContinue();
% end
    }

% end
};
% end

} // namespace panda::es2panda::lexer

// NOLINTEND(readability-identifier-naming)

#endif