1 // Copyright 2009 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
5 // This package defines constants representing the lexical
6 // tokens of the Go programming language and basic operations
7 // on tokens (printing, predicates).
17 // Token is the set of lexical tokens of the Go programming language.
20 // The list of tokens.
28 // Identifiers and basic type literals
29 // (these tokens stand for classes of literals)
39 // Operators and delimiters
// At the moment we have no array literal syntax that lets us describe
// the index for each element - use a map for now to make sure they are kept in sync.
// tokens maps each token with a fixed textual form (operators,
// delimiters, keywords) to that form.
// NOTE(review): most entries and the closing brace of this composite
// literal are not visible in this excerpt.
var tokens = map[Token]string{
	AND_NOT_ASSIGN: "&^=",
	CONTINUE:       "continue",
	FALLTHROUGH:    "fallthrough",
	INTERFACE:      "interface",
// String returns the string corresponding to the token tok.
// For operators, delimiters, and keywords the string is the actual
// token character sequence (e.g., for the token ADD, the string is
// "+"). For all other tokens the string corresponds to the token
// constant name (e.g. for the token IDENT, the string is "IDENT").
func (tok Token) String() string {
	// Tokens with a fixed textual form are looked up in the tokens map.
	if str, exists := tokens[tok]; exists {
		// NOTE(review): the success branch (presumably "return str") and
		// the closing brace are not visible in this excerpt.
	// Fallback for tokens without a map entry: render the numeric
	// token value, e.g. "token(42)".
	return "token(" + strconv.Itoa(int(tok)) + ")"
250 // A set of constants for precedence-based expression parsing.
251 // Non-operators have lowest precedence, followed by operators
252 // starting with precedence 1 up to unary operators. The highest
// precedence serves as the "catch-all" precedence for
254 // selector, indexing, and other operator and delimiter tokens.
// LowestPrec is the precedence of non-operator tokens.
// NOTE(review): the enclosing const group and its remaining
// constants are not visible in this excerpt.
LowestPrec = 0 // non-operators
// Precedence returns the operator precedence of the binary
// operator op. If op is not a binary operator, the result
// is LowestPrec.
func (op Token) Precedence() int {
	// NOTE(review): the switch header and the per-case return values
	// are not visible in this excerpt. Each case below groups binary
	// operators of equal binding strength; the grouping matches the
	// Go spec's binary-operator precedence levels — confirm against
	// the elided return values.
	// Comparison operators.
	case EQL, NEQ, LSS, LEQ, GTR, GEQ:
	// Additive-level operators.
	case ADD, SUB, OR, XOR:
	// Multiplicative-level operators.
	case MUL, QUO, REM, SHL, SHR, AND, AND_NOT:
// keywords maps the textual form of each keyword to its token.
// It is populated once at package initialization from the tokens table.
var keywords map[string]Token

// NOTE(review): the enclosing function header (presumably func init())
// and its closing braces are not visible in this excerpt.
keywords = make(map[string]Token)
// Walk the keyword range of the token constants and index each
// keyword's string form back to its token value.
for i := keyword_beg + 1; i < keyword_end; i++ {
	keywords[tokens[i]] = i
// Lookup maps an identifier to its keyword token or IDENT (if not a keyword).
func Lookup(ident []byte) Token {
	// TODO Maps with []byte key are illegal because []byte does not
	// support == . Should find a more efficient solution eventually.
	// The ident bytes are converted to a string for the map lookup.
	if tok, is_keyword := keywords[string(ident)]; is_keyword {
		// NOTE(review): the branch body (presumably "return tok") and
		// the fallback "return IDENT" are not visible in this excerpt.
310 // IsLiteral returns true for tokens corresponding to identifiers
311 // and basic type literals; returns false otherwise.
313 func (tok Token
) IsLiteral() bool { return literal_beg
< tok
&& tok
< literal_end
}
315 // IsOperator returns true for tokens corresponding to operators and
316 // delimiters; returns false otherwise.
318 func (tok Token
) IsOperator() bool { return operator_beg
< tok
&& tok
< operator_end
}
320 // IsKeyword returns true for tokens corresponding to keywords;
321 // returns false otherwise.
323 func (tok Token
) IsKeyword() bool { return keyword_beg
< tok
&& tok
< keyword_end
}
// Token source positions are represented by a Position value.
// A Position is valid if the line number is > 0.
// NOTE(review): the closing brace of this struct is not visible in
// this excerpt.
type Position struct {
	Filename string // filename, if any
	Offset   int    // byte offset, starting at 0
	Line     int    // line number, starting at 1
	Column   int    // column number, starting at 1 (character count)
337 // Pos is an accessor method for anonymous Position fields.
338 // It returns its receiver.
340 func (pos
*Position
) Pos() Position
{ return *pos
}
343 // IsValid returns true if the position is valid.
344 func (pos
*Position
) IsValid() bool { return pos
.Line
> 0 }
347 func (pos Position
) String() string {
353 s
+= fmt
.Sprintf("%d:%d", pos
.Line
, pos
.Column
)