Merge from mainline (167278:168000).
[official-gcc/graphite-test-results.git] / libgo / go / go / token / token.go
blobbc6c6a865b2a62d0d1f2f9653a3069a3279a051e
1 // Copyright 2009 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
// Package token defines constants representing the lexical tokens of the
// Go programming language and basic operations on tokens (printing,
// predicates).
9 package token
11 import (
12 "fmt"
13 "strconv"
// Token is the set of lexical tokens of the Go programming language.
type Token int

// The list of tokens.
//
// The unexported *_beg/*_end markers delimit the literal, operator, and
// keyword ranges; they are relied upon by the IsLiteral/IsOperator/IsKeyword
// predicates and by the keyword-table initialization, so the relative order
// of the constants must not change.
const (
	// Special tokens
	ILLEGAL Token = iota
	EOF
	COMMENT

	literal_beg
	// Identifiers and basic type literals
	// (these tokens stand for classes of literals)
	IDENT  // main
	INT    // 12345
	FLOAT  // 123.45
	IMAG   // 123.45i
	CHAR   // 'a'
	STRING // "abc"
	literal_end

	operator_beg
	// Operators and delimiters
	ADD // +
	SUB // -
	MUL // *
	QUO // /
	REM // %

	AND     // &
	OR      // |
	XOR     // ^
	SHL     // <<
	SHR     // >>
	AND_NOT // &^

	ADD_ASSIGN // +=
	SUB_ASSIGN // -=
	MUL_ASSIGN // *=
	QUO_ASSIGN // /=
	REM_ASSIGN // %=

	AND_ASSIGN     // &=
	OR_ASSIGN      // |=
	XOR_ASSIGN     // ^=
	SHL_ASSIGN     // <<=
	SHR_ASSIGN     // >>=
	AND_NOT_ASSIGN // &^=

	LAND  // &&
	LOR   // ||
	ARROW // <-
	INC   // ++
	DEC   // --

	EQL    // ==
	LSS    // <
	GTR    // >
	ASSIGN // =
	NOT    // !

	NEQ      // !=
	LEQ      // <=
	GEQ      // >=
	DEFINE   // :=
	ELLIPSIS // ...

	LPAREN // (
	LBRACK // [
	LBRACE // {
	COMMA  // ,
	PERIOD // .

	RPAREN    // )
	RBRACK    // ]
	RBRACE    // }
	SEMICOLON // ;
	COLON     // :
	operator_end

	keyword_beg
	// Keywords
	BREAK
	CASE
	CHAN
	CONST
	CONTINUE

	DEFAULT
	DEFER
	ELSE
	FALLTHROUGH
	FOR

	FUNC
	GO
	GOTO
	IF
	IMPORT

	INTERFACE
	MAP
	PACKAGE
	RANGE
	RETURN

	SELECT
	STRUCT
	SWITCH
	TYPE
	VAR
	keyword_end
)
// At the moment we have no array literal syntax that lets us describe
// the index for each element - use a map for now to make sure they are
// in sync.
//
// tokens maps each Token to its printable spelling: the constant name for
// special tokens and literal classes, and the exact source text for
// operators, delimiters, and keywords. It backs Token.String and (for the
// keyword entries) the keywords lookup table built in init.
var tokens = map[Token]string{
	// Special tokens: spelled by constant name.
	ILLEGAL: "ILLEGAL",

	EOF:     "EOF",
	COMMENT: "COMMENT",

	// Identifiers and basic type literals: spelled by class name.
	IDENT:  "IDENT",
	INT:    "INT",
	FLOAT:  "FLOAT",
	IMAG:   "IMAG",
	CHAR:   "CHAR",
	STRING: "STRING",

	// Operators and delimiters: spelled by their source text.
	ADD: "+",
	SUB: "-",
	MUL: "*",
	QUO: "/",
	REM: "%",

	AND:     "&",
	OR:      "|",
	XOR:     "^",
	SHL:     "<<",
	SHR:     ">>",
	AND_NOT: "&^",

	ADD_ASSIGN: "+=",
	SUB_ASSIGN: "-=",
	MUL_ASSIGN: "*=",
	QUO_ASSIGN: "/=",
	REM_ASSIGN: "%=",

	AND_ASSIGN:     "&=",
	OR_ASSIGN:      "|=",
	XOR_ASSIGN:     "^=",
	SHL_ASSIGN:     "<<=",
	SHR_ASSIGN:     ">>=",
	AND_NOT_ASSIGN: "&^=",

	LAND:  "&&",
	LOR:   "||",
	ARROW: "<-",
	INC:   "++",
	DEC:   "--",

	EQL:    "==",
	LSS:    "<",
	GTR:    ">",
	ASSIGN: "=",
	NOT:    "!",

	NEQ:      "!=",
	LEQ:      "<=",
	GEQ:      ">=",
	DEFINE:   ":=",
	ELLIPSIS: "...",

	LPAREN: "(",
	LBRACK: "[",
	LBRACE: "{",
	COMMA:  ",",
	PERIOD: ".",

	RPAREN:    ")",
	RBRACK:    "]",
	RBRACE:    "}",
	SEMICOLON: ";",
	COLON:     ":",

	// Keywords: spelled by their source text (also keys of the keywords map).
	BREAK:    "break",
	CASE:     "case",
	CHAN:     "chan",
	CONST:    "const",
	CONTINUE: "continue",

	DEFAULT:     "default",
	DEFER:       "defer",
	ELSE:        "else",
	FALLTHROUGH: "fallthrough",
	FOR:         "for",

	FUNC:   "func",
	GO:     "go",
	GOTO:   "goto",
	IF:     "if",
	IMPORT: "import",

	INTERFACE: "interface",
	MAP:       "map",
	PACKAGE:   "package",
	RANGE:     "range",
	RETURN:    "return",

	SELECT: "select",
	STRUCT: "struct",
	SWITCH: "switch",
	TYPE:   "type",
	VAR:    "var",
}
236 // String returns the string corresponding to the token tok.
237 // For operators, delimiters, and keywords the string is the actual
238 // token character sequence (e.g., for the token ADD, the string is
239 // "+"). For all other tokens the string corresponds to the token
240 // constant name (e.g. for the token IDENT, the string is "IDENT").
242 func (tok Token) String() string {
243 if str, exists := tokens[tok]; exists {
244 return str
246 return "token(" + strconv.Itoa(int(tok)) + ")"
// A set of constants for precedence-based expression parsing.
// Non-operators have lowest precedence, followed by operators
// starting with precedence 1 up to unary operators. The highest
// precedence serves as a "catch-all" precedence for selector,
// indexing, and other operator and delimiter tokens.
const (
	LowestPrec  = 0 // non-operators
	UnaryPrec   = 7
	HighestPrec = 8
)
263 // Precedence returns the operator precedence of the binary
264 // operator op. If op is not a binary operator, the result
265 // is LowestPrecedence.
267 func (op Token) Precedence() int {
268 switch op {
269 case LOR:
270 return 1
271 case LAND:
272 return 2
273 case ARROW:
274 return 3
275 case EQL, NEQ, LSS, LEQ, GTR, GEQ:
276 return 4
277 case ADD, SUB, OR, XOR:
278 return 5
279 case MUL, QUO, REM, SHL, SHR, AND, AND_NOT:
280 return 6
282 return LowestPrec
286 var keywords map[string]Token
288 func init() {
289 keywords = make(map[string]Token)
290 for i := keyword_beg + 1; i < keyword_end; i++ {
291 keywords[tokens[i]] = i
296 // Lookup maps an identifier to its keyword token or IDENT (if not a keyword).
298 func Lookup(ident []byte) Token {
299 // TODO Maps with []byte key are illegal because []byte does not
300 // support == . Should find a more efficient solution eventually.
301 if tok, is_keyword := keywords[string(ident)]; is_keyword {
302 return tok
304 return IDENT
308 // Predicates
310 // IsLiteral returns true for tokens corresponding to identifiers
311 // and basic type literals; returns false otherwise.
313 func (tok Token) IsLiteral() bool { return literal_beg < tok && tok < literal_end }
315 // IsOperator returns true for tokens corresponding to operators and
316 // delimiters; returns false otherwise.
318 func (tok Token) IsOperator() bool { return operator_beg < tok && tok < operator_end }
320 // IsKeyword returns true for tokens corresponding to keywords;
321 // returns false otherwise.
323 func (tok Token) IsKeyword() bool { return keyword_beg < tok && tok < keyword_end }
// Token source positions are represented by a Position value.
// A Position is valid if the line number is > 0 (see IsValid).
// The zero value is an invalid, anonymous position.
type Position struct {
	Filename string // filename, if any
	Offset   int    // byte offset, starting at 0
	Line     int    // line number, starting at 1
	Column   int    // column number, starting at 1 (character count)
}
337 // Pos is an accessor method for anonymous Position fields.
338 // It returns its receiver.
340 func (pos *Position) Pos() Position { return *pos }
343 // IsValid returns true if the position is valid.
344 func (pos *Position) IsValid() bool { return pos.Line > 0 }
347 func (pos Position) String() string {
348 s := pos.Filename
349 if pos.IsValid() {
350 if s != "" {
351 s += ":"
353 s += fmt.Sprintf("%d:%d", pos.Line, pos.Column)
355 if s == "" {
356 s = "-"
358 return s