/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <iostream>
#include <string>

#include <gtest/gtest.h>
#include "../define.h"
#include "../lexer.h"

namespace panda::test {

using namespace panda::pandasm;

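// "mov v1, v2": mnemonic, register, comma delimiter, register.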
TEST(lexertests, test1)
{
    Lexer l;
    std::string s = "mov v1, v2";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(TokenTypeWhat(tok.first[0].type), "OPERATION") << "OPERATION expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[1].type), "ID") << "ID expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[2].type), "DEL_COMMA") << "DEL_COMMA expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[3].type), "ID") << "ID expected";
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

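// "ldai 1": mnemonic followed by an immediate operand, which the lexer reports as an ID token.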
TEST(lexertests, test2)
{
    Lexer l;
    std::string s = "ldai 1";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(TokenTypeWhat(tok.first[0].type), "OPERATION") << "OPERATION expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[1].type), "ID") << "ID expected";
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

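// Input spanning two lines: each mnemonic becomes an OPERATION token, each operand an ID token.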
TEST(lexertests, test3)
{
    Lexer l;
    std::string s = "movi\nlda v2 v10 mov v2";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(TokenTypeWhat(tok.first[0].type), "OPERATION") << "OPERATION expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[1].type), "OPERATION") << "OPERATION expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[2].type), "ID") << "ID expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[3].type), "ID") << "ID expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[4].type), "OPERATION") << "OPERATION expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[5].type), "ID") << "ID expected";
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

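// "jmp <very long label>": the long label is still tokenized as a single ID.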
TEST(lexertests, test4)
{
    Lexer l;
    std::string s = "jmp Iasdfsadkfjhasifhsaiuhdacoisjdaociewhasdasdfkjasdfhjksadhfkhsakdfjhksajhdkfjhskhdfkjahhjdskaj";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(TokenTypeWhat(tok.first[0].type), "OPERATION") << "OPERATION expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[1].type), "ID") << "ID expected";
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

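// "call.short 1111, 1": dotted mnemonic with comma-separated operands.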
TEST(lexertests, test5)
{
    Lexer l;
    std::string s = "call.short 1111, 1";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(TokenTypeWhat(tok.first[0].type), "OPERATION") << "OPERATION expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[1].type), "ID") << "ID expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[2].type), "DEL_COMMA") << "DEL_COMMA expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[3].type), "ID") << "ID expected";
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

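// "jle v1 met": mnemonic with a register and a label operand, both reported as ID tokens.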
TEST(lexertests, test6)
{
    Lexer l;
    std::string s = "jle v1 met";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(TokenTypeWhat(tok.first[0].type), "OPERATION") << "OPERATION expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[1].type), "ID") << "ID expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[2].type), "ID") << "ID expected";
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

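// "label:": a label definition is reported as an ID token.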
TEST(lexertests, test7)
{
    Lexer l;
    std::string s = "label:";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(TokenTypeWhat(tok.first[0].type), "ID") << "ID expected";
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

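// A lone comma produces a DEL_COMMA token.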
TEST(lexertests, test8)
{
    Lexer l;
    std::string s = ",";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(TokenTypeWhat(tok.first[0].type), "DEL_COMMA") << "DEL_COMMA expected";
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

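// Each delimiter character maps to its own DEL_* token type.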
TEST(lexertests, test9)
{
    Lexer l;
    std::string s = ",:{}()<>=";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(TokenTypeWhat(tok.first[0].type), "DEL_COMMA") << "DEL_COMMA expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[1].type), "DEL_COLON") << "DEL_COLON expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[2].type), "DEL_BRACE_L") << "DEL_BRACE_L expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[3].type), "DEL_BRACE_R") << "DEL_BRACE_R expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[4].type), "DEL_BRACKET_L") << "DEL_BRACKET_L expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[5].type), "DEL_BRACKET_R") << "DEL_BRACKET_R expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[6].type), "DEL_LT") << "DEL_LT expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[7].type), "DEL_GT") << "DEL_GT expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[8].type), "DEL_EQ") << "DEL_EQ expected";
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

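// Very long identifiers are tokenized without producing an error.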
TEST(lexertests, test11)
{
    Lexer l;
    std::string s =
        "i64.to.f32 alsdhashdjskhfka "
        "shdkfhkasdhfkhsakdhfkshkfhskahlfkjsdfkjadskhfkshadkhfsdakhfksahdkfaksdfkhaskldhkfashdlfkjhasdkjfhklasjhdfklhsa"
        "fhska";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

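// ".function asd(u32){}": the directive is a KEYWORD token, the function name an ID token.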
TEST(lexertests, test12)
{
    Lexer l;
    std::string s = ".function asd(u32){}";
    Tokens tok = l.TokenizeString(s);
    ASSERT_EQ(TokenTypeWhat(tok.first[0].type), "KEYWORD") << "KEYWORD expected";
    ASSERT_EQ(TokenTypeWhat(tok.first[1].type), "ID") << "ID expected";
    ASSERT_EQ(tok.second.err, Error::ErrorType::ERR_NONE) << "ERR_NONE expected";
}

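// String literals: an unterminated literal reports ERR_STRING_MISSING_TERMINATING_CHARACTER;
// a properly terminated literal yields an ID_STRING token whose bounds cover the quoted text.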
TEST(lexertests, string_literal)
{
    {
        Lexer l;
        std::string s = "\"123";
        Tokens tok = l.TokenizeString(s);

        Error e = tok.second;

        ASSERT_EQ(e.err, Error::ErrorType::ERR_STRING_MISSING_TERMINATING_CHARACTER);
    }

    {
        Lexer l;
        std::string s = "\"123\\\"";
        Tokens tok = l.TokenizeString(s);

        Error e = tok.second;

        ASSERT_EQ(e.err, Error::ErrorType::ERR_STRING_MISSING_TERMINATING_CHARACTER);
    }

    {
        Lexer l;
        std::string s = "\" a b \\ c d \"";
        Tokens tok = l.TokenizeString(s);

        Error e = tok.second;

        ASSERT_EQ(e.err, Error::ErrorType::ERR_NONE);
        ASSERT_EQ(tok.first.size(), 1U);
        ASSERT_EQ(tok.first[0].type, Token::Type::ID_STRING);
        ASSERT_EQ(tok.first[0].bound_left, 0U);
        ASSERT_EQ(tok.first[0].bound_right, s.length());
    }

    {
        Lexer l;
        std::string s = "\"abcd\"1234";
        Tokens tok = l.TokenizeString(s);

        Error e = tok.second;

        ASSERT_EQ(e.err, Error::ErrorType::ERR_NONE);
        ASSERT_EQ(tok.first.size(), 2U);
        ASSERT_EQ(tok.first[0].type, Token::Type::ID_STRING);
        ASSERT_EQ(tok.first[0].bound_left, 0U);
        ASSERT_EQ(tok.first[0].bound_right, s.find('1'));
    }
}

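// "i32[]": an array type is split into an ID token and the two square bracket delimiters.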
TEST(lexertests, array_type)
{
    Lexer l;
    std::string s = "i32[]";
    Tokens tok = l.TokenizeString(s);

    Error e = tok.second;
    ASSERT_EQ(e.err, Error::ErrorType::ERR_NONE);
    ASSERT_EQ(tok.first.size(), 3U);
    ASSERT_EQ(tok.first[0].type, Token::Type::ID);
    ASSERT_EQ(tok.first[1].type, Token::Type::DEL_SQUARE_BRACKET_L);
    ASSERT_EQ(tok.first[2].type, Token::Type::DEL_SQUARE_BRACKET_R);
}

}  // namespace panda::test