• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "testing/gtest/include/gtest/gtest.h"
#include "tools/gn/input_file.h"
#include "tools/gn/token.h"
#include "tools/gn/tokenizer.h"

10 namespace {
11 
12 struct TokenExpectation {
13   Token::Type type;
14   const char* value;
15 };
16 
17 template<size_t len>
CheckTokenizer(const char * input,const TokenExpectation (& expect)[len])18 bool CheckTokenizer(const char* input, const TokenExpectation (&expect)[len]) {
19   InputFile input_file(SourceFile("/test"));
20   input_file.SetContents(input);
21 
22   Err err;
23   std::vector<Token> results = Tokenizer::Tokenize(&input_file, &err);
24 
25   if (results.size() != len)
26     return false;
27   for (size_t i = 0; i < len; i++) {
28     if (expect[i].type != results[i].type())
29       return false;
30     if (expect[i].value != results[i].value())
31       return false;
32   }
33   return true;
34 }
35 
36 }  // namespace
37 
// Empty and whitespace-only inputs should produce no tokens at all.
TEST(Tokenizer, Empty) {
  Err err;

  InputFile empty_input(SourceFile("/test"));
  empty_input.SetContents("");
  std::vector<Token> results = Tokenizer::Tokenize(&empty_input, &err);
  EXPECT_TRUE(results.empty());

  InputFile whitespace_only(SourceFile("/test"));
  whitespace_only.SetContents("  \r \n \r\n");
  results = Tokenizer::Tokenize(&whitespace_only, &err);
  EXPECT_TRUE(results.empty());
}

// A bare word surrounded by whitespace tokenizes as a single identifier.
TEST(Tokenizer, Identifier) {
  const TokenExpectation expected[] = {
    { Token::IDENTIFIER, "foo" },
  };
  EXPECT_TRUE(CheckTokenizer("  foo ", expected));
}

// Positive and negative integer literals tokenize as INTEGER tokens.
TEST(Tokenizer, Integer) {
  const TokenExpectation expected[] = {
    { Token::INTEGER, "123" },
    { Token::INTEGER, "-123" },
  };
  EXPECT_TRUE(CheckTokenizer("  123 -123 ", expected));
}

// With no separating space, "123-123" still yields two integer tokens:
// the '-' is folded into the second number rather than emitted as MINUS.
TEST(Tokenizer, IntegerNoSpace) {
  const TokenExpectation expected[] = {
    { Token::INTEGER, "123" },
    { Token::INTEGER, "-123" },
  };
  EXPECT_TRUE(CheckTokenizer("  123-123 ", expected));
}

// String tokens keep their surrounding quotes and escape sequences in the
// raw token value.
TEST(Tokenizer, String) {
  const TokenExpectation expected[] = {
    { Token::STRING, "\"foo\"" },
    { Token::STRING, "\"bar\\\"baz\"" },
    { Token::STRING, "\"asdf\\\\\"" },
  };
  EXPECT_TRUE(
      CheckTokenizer("  \"foo\" \"bar\\\"baz\" \"asdf\\\\\" ", expected));
}

// Every unary and binary operator maps to its own token type.
TEST(Tokenizer, Operator) {
  const TokenExpectation expected[] = {
    { Token::MINUS, "-" },
    { Token::PLUS, "+" },
    { Token::EQUAL, "=" },
    { Token::PLUS_EQUALS, "+=" },
    { Token::MINUS_EQUALS, "-=" },
    { Token::NOT_EQUAL, "!=" },
    { Token::EQUAL_EQUAL, "==" },
    { Token::LESS_THAN, "<" },
    { Token::GREATER_THAN, ">" },
    { Token::LESS_EQUAL, "<=" },
    { Token::GREATER_EQUAL, ">=" },
    { Token::BANG, "!" },
    { Token::BOOLEAN_OR, "||" },
    { Token::BOOLEAN_AND, "&&" },
  };
  EXPECT_TRUE(CheckTokenizer("- + = += -= != ==  < > <= >= ! || &&",
              expected));
}

// Braces, brackets, and parentheses each get a distinct token type.
TEST(Tokenizer, Scoper) {
  const TokenExpectation expected[] = {
    { Token::LEFT_BRACE, "{" },
    { Token::LEFT_BRACKET, "[" },
    { Token::RIGHT_BRACKET, "]" },
    { Token::RIGHT_BRACE, "}" },
    { Token::LEFT_PAREN, "(" },
    { Token::RIGHT_PAREN, ")" },
  };
  EXPECT_TRUE(CheckTokenizer("{[ ]} ()", expected));
}

// A small call-with-block snippet spanning a newline tokenizes into the
// expected sequence.
TEST(Tokenizer, FunctionCall) {
  const TokenExpectation expected[] = {
    { Token::IDENTIFIER, "fun" },
    { Token::LEFT_PAREN, "(" },
    { Token::STRING, "\"foo\"" },
    { Token::RIGHT_PAREN, ")" },
    { Token::LEFT_BRACE, "{" },
    { Token::IDENTIFIER, "foo" },
    { Token::EQUAL, "=" },
    { Token::INTEGER, "12" },
    { Token::RIGHT_BRACE, "}" },
  };
  EXPECT_TRUE(CheckTokenizer("fun(\"foo\") {\nfoo = 12}", expected));
}

// Token::StringValue() should return the contents with the surrounding
// quotes stripped and escape sequences processed.
TEST(Tokenizer, StringUnescaping) {
  InputFile input(SourceFile("/test"));
  input.SetContents("\"asd\\\"f\" \"\"");

  Err err;
  std::vector<Token> tokens = Tokenizer::Tokenize(&input, &err);

  ASSERT_EQ(2u, tokens.size());
  EXPECT_EQ("asd\"f", tokens[0].StringValue());
  EXPECT_EQ("", tokens[1].StringValue());
}

// Tokens should record 1-based (line, column) source locations, including
// across a newline.
TEST(Tokenizer, Locations) {
  InputFile input(SourceFile("/test"));
  input.SetContents("1 2 \"three\"\n  4");

  Err err;
  std::vector<Token> tokens = Tokenizer::Tokenize(&input, &err);

  ASSERT_EQ(4u, tokens.size());
  ASSERT_TRUE(tokens[0].location() == Location(&input, 1, 1));
  ASSERT_TRUE(tokens[1].location() == Location(&input, 1, 3));
  ASSERT_TRUE(tokens[2].location() == Location(&input, 1, 5));
  ASSERT_TRUE(tokens[3].location() == Location(&input, 2, 3));
}

// ByteOffsetOfNthLine() maps a 1-based line number to the byte offset of
// that line's first character.
TEST(Tokenizer, ByteOffsetOfNthLine) {
  EXPECT_EQ(0u, Tokenizer::ByteOffsetOfNthLine("foo", 1));

  // Windows and Posix have different line endings, so verify by checking
  // the character found at the returned offset rather than the offset.
  char multiline[] = "aaa\nxaa\n\nya";
  EXPECT_EQ('x', multiline[Tokenizer::ByteOffsetOfNthLine(multiline, 2)]);
  EXPECT_EQ('y', multiline[Tokenizer::ByteOffsetOfNthLine(multiline, 4)]);

  char two_lines[3];
  two_lines[0] = 'a';
  two_lines[1] = '\n';  // Built by hand to avoid Windows double-byte endings.
  two_lines[2] = 0;
  EXPECT_EQ(0u, Tokenizer::ByteOffsetOfNthLine(two_lines, 1));
  EXPECT_EQ(2u, Tokenizer::ByteOffsetOfNthLine(two_lines, 2));
}