/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <iostream>
#include <string>

#include <gtest/gtest.h>
#include "../define.h"
#include "../lexer.h"

using namespace panda::pandasm;

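// "mov v1, v2" should tokenize as OPERATION, ID, DEL_COMMA, ID with no error.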
TEST(lexertests, test1)
{
    Lexer l;
    std::string s = "mov v1, v2";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(TokenTypeWhat(tok.first[0].type), "OPERATION") << "OPERATION expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[1].type), "ID") << "ID expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[2].type), "DEL_COMMA") << "DEL_COMMA expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[3].type), "ID") << "ID expected";
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

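// "ldai 1" should tokenize as OPERATION followed by ID.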
TEST(lexertests, test2)
{
    Lexer l;
    std::string s = "ldai 1";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(TokenTypeWhat(tok.first[0].type), "OPERATION") << "OPERATION expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[1].type), "ID") << "ID expected";
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

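// A multi-line input with several instructions keeps operations and operands in order.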
TEST(lexertests, test3)
{
    Lexer l;
    std::string s = "movi\nlda v2 v10 mov v2";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(TokenTypeWhat(tok.first[0].type), "OPERATION") << "OPERATION expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[1].type), "OPERATION") << "OPERATION expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[2].type), "ID") << "ID expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[3].type), "ID") << "ID expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[4].type), "OPERATION") << "OPERATION expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[5].type), "ID") << "ID expected";
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

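// A jump to a very long label name is still tokenized as OPERATION, ID.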
TEST(lexertests, test4)
{
    Lexer l;
    std::string s = "jmp Iasdfsadkfjhasifhsaiuhdacoisjdaociewhasdasdfkjasdfhjksadhfkhsakdfjhksajhdkfjhskhdfkjahhjdskaj";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(TokenTypeWhat(tok.first[0].type), "OPERATION") << "OPERATION expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[1].type), "ID") << "ID expected";
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

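// "call.short 1111, 1" splits into OPERATION, ID, DEL_COMMA, ID.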
TEST(lexertests, test5)
{
    Lexer l;
    std::string s = "call.short 1111, 1";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(TokenTypeWhat(tok.first[0].type), "OPERATION") << "OPERATION expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[1].type), "ID") << "ID expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[2].type), "DEL_COMMA") << "DEL_COMMA expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[3].type), "ID") << "ID expected";
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

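// "jle v1 met" yields OPERATION followed by two IDs.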
TEST(lexertests, test6)
{
    Lexer l;
    std::string s = "jle v1 met";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(TokenTypeWhat(tok.first[0].type), "OPERATION") << "OPERATION expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[1].type), "ID") << "ID expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[2].type), "ID") << "ID expected";
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

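// A label definition "label:" starts with an ID token.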
TEST(lexertests, test7)
{
    Lexer l;
    std::string s = "label:";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(TokenTypeWhat(tok.first[0].type), "ID") << "ID expected";
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

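// A lone comma is recognized as DEL_COMMA.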
TEST(lexertests, test8)
{
    Lexer l;
    std::string s = ",";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(TokenTypeWhat(tok.first[0].type), "DEL_COMMA") << "DEL_COMMA expected";
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

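// Each delimiter character in ",:{}()<>=" maps to its own delimiter token type.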
TEST(lexertests, test9)
{
    Lexer l;
    std::string s = ",:{}()<>=";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(TokenTypeWhat(tok.first[0].type), "DEL_COMMA") << "DEL_COMMA expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[1].type), "DEL_COLON") << "DEL_COLON expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[2].type), "DEL_BRACE_L") << "DEL_BRACE_L expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[3].type), "DEL_BRACE_R") << "DEL_BRACE_R expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[4].type), "DEL_BRACKET_L") << "DEL_BRACKET_L expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[5].type), "DEL_BRACKET_R") << "DEL_BRACKET_R expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[6].type), "DEL_LT") << "DEL_LT expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[7].type), "DEL_GT") << "DEL_GT expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[8].type), "DEL_EQ") << "DEL_EQ expected";
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

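// An operation followed by very long identifiers is tokenized without error.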
TEST(lexertests, test11)
{
    Lexer l;
    std::string s =
        "i64.to.f32 alsdhashdjskhfka "
        "shdkfhkasdhfkhsakdhfkshkfhskahlfkjsdfkjadskhfkshadkhfsdakhfksahdkfaksdfkhaskldhkfashdlfkjhasdkjfhklasjhdfklhsa"
        "fhska";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

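// A ".function" declaration starts with a KEYWORD token followed by the function name ID.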
TEST(lexertests, test12)
{
    Lexer l;
    std::string s = ".function asd(u32){}";
    Tokens tok = l.TokenizeString(s);

    ASSERT_EQ(TokenTypeWhat(tok.first[0].type), "KEYWORD") << "KEYWORD expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[1].type), "ID") << "ID expected";
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

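// String literals: an unterminated literal reports ERR_STRING_MISSING_TERMINATING_CHARACTER;
// a terminated one produces an ID_STRING token whose bounds span the quoted text.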
TEST(lexertests, string_literal)
{
    {
        Lexer l;
        std::string s = "\"123";
        Tokens tok = l.TokenizeString(s);

        Error e = tok.second;

        ASSERT_EQ(e.err, Error::ErrorType::ERR_STRING_MISSING_TERMINATING_CHARACTER);
    }

    {
        Lexer l;
        std::string s = "\"123\\\"";
        Tokens tok = l.TokenizeString(s);

        Error e = tok.second;

        ASSERT_EQ(e.err, Error::ErrorType::ERR_STRING_MISSING_TERMINATING_CHARACTER);
    }

    {
        Lexer l;
        std::string s = "\" a b \\ c d \"";
        Tokens tok = l.TokenizeString(s);

        Error e = tok.second;

        ASSERT_EQ(e.err, Error::ErrorType::ERR_NONE);
        ASSERT_EQ(tok.first.size(), 1U);
        ASSERT_EQ(tok.first[0].type, Token::Type::ID_STRING);
        ASSERT_EQ(tok.first[0].bound_left, 0U);
        ASSERT_EQ(tok.first[0].bound_right, s.length());
    }

    {
        Lexer l;
        std::string s = "\"abcd\"1234";
        Tokens tok = l.TokenizeString(s);

        Error e = tok.second;

        ASSERT_EQ(e.err, Error::ErrorType::ERR_NONE);
        ASSERT_EQ(tok.first.size(), 2U);
        ASSERT_EQ(tok.first[0].type, Token::Type::ID_STRING);
        ASSERT_EQ(tok.first[0].bound_left, 0U);
        ASSERT_EQ(tok.first[0].bound_right, s.find('1'));
    }
}

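// "i32[]" tokenizes as an ID followed by left and right square bracket delimiters.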
TEST(lexertests, array_type)
{
    Lexer l;
    std::string s = "i32[]";
    Tokens tok = l.TokenizeString(s);

    Error e = tok.second;
    ASSERT_EQ(e.err, Error::ErrorType::ERR_NONE);
    ASSERT_EQ(tok.first.size(), 3U);
    ASSERT_EQ(tok.first[0].type, Token::Type::ID);
    ASSERT_EQ(tok.first[1].type, Token::Type::DEL_SQUARE_BRACKET_L);
    ASSERT_EQ(tok.first[2].type, Token::Type::DEL_SQUARE_BRACKET_R);
}