1 // Copyright (c) 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
#include <stddef.h>

#include <vector>

#include "testing/gtest/include/gtest/gtest.h"
#include "tools/gn/input_file.h"
#include "tools/gn/token.h"
#include "tools/gn/tokenizer.h"
12 struct TokenExpectation
{
18 bool CheckTokenizer(const char* input
, const TokenExpectation (&expect
)[len
]) {
19 InputFile
input_file(SourceFile("/test"));
20 input_file
.SetContents(input
);
23 std::vector
<Token
> results
= Tokenizer::Tokenize(&input_file
, &err
);
25 if (results
.size() != len
)
27 for (size_t i
= 0; i
< len
; i
++) {
28 if (expect
[i
].type
!= results
[i
].type())
30 if (expect
[i
].value
!= results
[i
].value())
38 TEST(Tokenizer
, Empty
) {
39 InputFile
empty_string_input(SourceFile("/test"));
40 empty_string_input
.SetContents("");
43 std::vector
<Token
> results
= Tokenizer::Tokenize(&empty_string_input
, &err
);
44 EXPECT_TRUE(results
.empty());
46 InputFile
whitespace_input(SourceFile("/test"));
47 whitespace_input
.SetContents(" \r \n \r\n");
49 results
= Tokenizer::Tokenize(&whitespace_input
, &err
);
50 EXPECT_TRUE(results
.empty());
53 TEST(Tokenizer
, Identifier
) {
54 TokenExpectation one_ident
[] = {
55 { Token::IDENTIFIER
, "foo" }
57 EXPECT_TRUE(CheckTokenizer(" foo ", one_ident
));
60 TEST(Tokenizer
, Integer
) {
61 TokenExpectation integers
[] = {
62 { Token::INTEGER
, "123" },
63 { Token::INTEGER
, "-123" }
65 EXPECT_TRUE(CheckTokenizer(" 123 -123 ", integers
));
68 TEST(Tokenizer
, IntegerNoSpace
) {
69 TokenExpectation integers
[] = {
70 { Token::INTEGER
, "123" },
71 { Token::INTEGER
, "-123" }
73 EXPECT_TRUE(CheckTokenizer(" 123-123 ", integers
));
76 TEST(Tokenizer
, String
) {
77 TokenExpectation strings
[] = {
78 { Token::STRING
, "\"foo\"" },
79 { Token::STRING
, "\"bar\\\"baz\"" },
80 { Token::STRING
, "\"asdf\\\\\"" }
82 EXPECT_TRUE(CheckTokenizer(" \"foo\" \"bar\\\"baz\" \"asdf\\\\\" ",
86 TEST(Tokenizer
, Operator
) {
87 TokenExpectation operators
[] = {
88 { Token::MINUS
, "-" },
90 { Token::EQUAL
, "=" },
91 { Token::PLUS_EQUALS
, "+=" },
92 { Token::MINUS_EQUALS
, "-=" },
93 { Token::NOT_EQUAL
, "!=" },
94 { Token::EQUAL_EQUAL
, "==" },
95 { Token::LESS_THAN
, "<" },
96 { Token::GREATER_THAN
, ">" },
97 { Token::LESS_EQUAL
, "<=" },
98 { Token::GREATER_EQUAL
, ">=" },
100 { Token::BOOLEAN_OR
, "||" },
101 { Token::BOOLEAN_AND
, "&&" },
103 EXPECT_TRUE(CheckTokenizer("- + = += -= != == < > <= >= ! || &&",
107 TEST(Tokenizer
, Scoper
) {
108 TokenExpectation scopers
[] = {
109 { Token::LEFT_BRACE
, "{" },
110 { Token::LEFT_BRACKET
, "[" },
111 { Token::RIGHT_BRACKET
, "]" },
112 { Token::RIGHT_BRACE
, "}" },
113 { Token::LEFT_PAREN
, "(" },
114 { Token::RIGHT_PAREN
, ")" },
116 EXPECT_TRUE(CheckTokenizer("{[ ]} ()", scopers
));
119 TEST(Tokenizer
, FunctionCall
) {
120 TokenExpectation fn
[] = {
121 { Token::IDENTIFIER
, "fun" },
122 { Token::LEFT_PAREN
, "(" },
123 { Token::STRING
, "\"foo\"" },
124 { Token::RIGHT_PAREN
, ")" },
125 { Token::LEFT_BRACE
, "{" },
126 { Token::IDENTIFIER
, "foo" },
127 { Token::EQUAL
, "=" },
128 { Token::INTEGER
, "12" },
129 { Token::RIGHT_BRACE
, "}" },
131 EXPECT_TRUE(CheckTokenizer("fun(\"foo\") {\nfoo = 12}", fn
));
134 TEST(Tokenizer
, StringUnescaping
) {
135 InputFile
input(SourceFile("/test"));
136 input
.SetContents("\"asd\\\"f\" \"\"");
138 std::vector
<Token
> results
= Tokenizer::Tokenize(&input
, &err
);
140 ASSERT_EQ(2u, results
.size());
141 EXPECT_EQ("asd\"f", results
[0].StringValue());
142 EXPECT_EQ("", results
[1].StringValue());
145 TEST(Tokenizer
, Locations
) {
146 InputFile
input(SourceFile("/test"));
147 input
.SetContents("1 2 \"three\"\n 4");
149 std::vector
<Token
> results
= Tokenizer::Tokenize(&input
, &err
);
151 ASSERT_EQ(4u, results
.size());
152 ASSERT_TRUE(results
[0].location() == Location(&input
, 1, 1));
153 ASSERT_TRUE(results
[1].location() == Location(&input
, 1, 3));
154 ASSERT_TRUE(results
[2].location() == Location(&input
, 1, 5));
155 ASSERT_TRUE(results
[3].location() == Location(&input
, 2, 3));
158 TEST(Tokenizer
, ByteOffsetOfNthLine
) {
159 EXPECT_EQ(0u, Tokenizer::ByteOffsetOfNthLine("foo", 1));
161 // Windows and Posix have different line endings, so check the byte at the
162 // location rather than the offset.
163 char input1
[] = "aaa\nxaa\n\nya";
164 EXPECT_EQ('x', input1
[Tokenizer::ByteOffsetOfNthLine(input1
, 2)]);
165 EXPECT_EQ('y', input1
[Tokenizer::ByteOffsetOfNthLine(input1
, 4)]);
169 input2
[1] = '\n'; // Manually set to avoid Windows double-byte endings.
171 EXPECT_EQ(0u, Tokenizer::ByteOffsetOfNthLine(input2
, 1));
172 EXPECT_EQ(2u, Tokenizer::ByteOffsetOfNthLine(input2
, 2));