official-gcc.git: libgo/go/go/parser/parser.go
1 // Copyright 2009 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
5 // Package parser implements a parser for Go source files. Input may be
6 // provided in a variety of forms (see the various Parse* functions); the
7 // output is an abstract syntax tree (AST) representing the Go source. The
8 // parser is invoked through one of the Parse* functions.
9 //
10 // The parser accepts a larger language than is syntactically permitted by
11 // the Go spec, for simplicity, and for improved robustness in the presence
12 // of syntax errors. For instance, in method declarations, the receiver is
13 // treated like an ordinary parameter list and thus may contain multiple
14 // entries where the spec permits exactly one. Consequently, the corresponding
15 // field in the AST (ast.FuncDecl.Recv) is not restricted to one entry.
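//
// A minimal usage sketch (the file name and error handling below are
// illustrative, not part of this package):
//
//	fset := token.NewFileSet()
//	f, err := parser.ParseFile(fset, "example.go", nil, parser.ParseComments)
//	if err != nil {
//		log.Fatal(err)
//	}
//	for _, decl := range f.Decls {
//		fmt.Printf("%T\n", decl)
//	}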
17 package parser
19 import (
20 "fmt"
21 "go/ast"
22 "go/internal/typeparams"
23 "go/scanner"
24 "go/token"
25 "strconv"
26 "strings"
27 "unicode"
30 // The parser structure holds the parser's internal state.
31 type parser struct {
32 file *token.File
33 errors scanner.ErrorList
34 scanner scanner.Scanner
36 // Tracing/debugging
37 mode Mode // parsing mode
38 trace bool // == (mode&Trace != 0)
39 indent int // indentation used for tracing output
41 // Comments
42 comments []*ast.CommentGroup
43 leadComment *ast.CommentGroup // last lead comment
44 lineComment *ast.CommentGroup // last line comment
46 // Next token
47 pos token.Pos // token position
48 tok token.Token // one token look-ahead
49 lit string // token literal
51 // Error recovery
52 // (used to limit the number of calls to parser.advance
53 // w/o making scanning progress - avoids potential endless
54 // loops across multiple parser functions during error recovery)
55 syncPos token.Pos // last synchronization position
56 syncCnt int // number of parser.advance calls without progress
58 // Non-syntactic parser control
59 exprLev int // < 0: in control clause, >= 0: in expression
60 inRhs bool // if set, the parser is parsing a rhs expression
62 imports []*ast.ImportSpec // list of imports
65 func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode Mode) {
66 p.file = fset.AddFile(filename, -1, len(src))
67 var m scanner.Mode
68 if mode&ParseComments != 0 {
69 m = scanner.ScanComments
71 eh := func(pos token.Position, msg string) { p.errors.Add(pos, msg) }
72 p.scanner.Init(p.file, src, eh, m)
74 p.mode = mode
75 p.trace = mode&Trace != 0 // for convenience (p.trace is used frequently)
76 p.next()
79 func (p *parser) allowGenerics() bool { return p.mode&typeparams.DisallowParsing == 0 }
80 func (p *parser) allowTypeSets() bool { return p.mode&typeparams.DisallowTypeSets == 0 }
82 // ----------------------------------------------------------------------------
83 // Parsing support
85 func (p *parser) printTrace(a ...any) {
86 const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
87 const n = len(dots)
88 pos := p.file.Position(p.pos)
89 fmt.Printf("%5d:%3d: ", pos.Line, pos.Column)
90 i := 2 * p.indent
91 for i > n {
92 fmt.Print(dots)
93 i -= n
95 // i <= n
96 fmt.Print(dots[0:i])
97 fmt.Println(a...)
100 func trace(p *parser, msg string) *parser {
101 p.printTrace(msg, "(")
102 p.indent++
103 return p
106 // Usage pattern: defer un(trace(p, "..."))
107 func un(p *parser) {
108 p.indent--
109 p.printTrace(")")
112 // Advance to the next token.
113 func (p *parser) next0() {
114 // Because of one-token look-ahead, print the previous token
115 // when tracing as it provides a more readable output. The
116 // very first token (!p.pos.IsValid()) is not initialized
117 // (it is token.ILLEGAL), so don't print it.
118 if p.trace && p.pos.IsValid() {
119 s := p.tok.String()
120 switch {
121 case p.tok.IsLiteral():
122 p.printTrace(s, p.lit)
123 case p.tok.IsOperator(), p.tok.IsKeyword():
124 p.printTrace("\"" + s + "\"")
125 default:
126 p.printTrace(s)
130 p.pos, p.tok, p.lit = p.scanner.Scan()
133 // Consume a comment and return it and the line on which it ends.
134 func (p *parser) consumeComment() (comment *ast.Comment, endline int) {
135 // /*-style comments may end on a different line than where they start.
136 // Scan the comment for '\n' chars and adjust endline accordingly.
137 endline = p.file.Line(p.pos)
138 if p.lit[1] == '*' {
139 // don't use range here - no need to decode Unicode code points
140 for i := 0; i < len(p.lit); i++ {
141 if p.lit[i] == '\n' {
142 endline++
147 comment = &ast.Comment{Slash: p.pos, Text: p.lit}
148 p.next0()
150 return
153 // Consume a group of adjacent comments, add it to the parser's
154 // comments list, and return it together with the line at which
155 // the last comment in the group ends. A non-comment token or n
156 // empty lines terminate a comment group.
158 func (p *parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline int) {
159 var list []*ast.Comment
160 endline = p.file.Line(p.pos)
161 for p.tok == token.COMMENT && p.file.Line(p.pos) <= endline+n {
162 var comment *ast.Comment
163 comment, endline = p.consumeComment()
164 list = append(list, comment)
167 // add comment group to the comments list
168 comments = &ast.CommentGroup{List: list}
169 p.comments = append(p.comments, comments)
171 return
174 // Advance to the next non-comment token. In the process, collect
175 // any comment groups encountered, and remember the last lead and
176 // line comments.
178 // A lead comment is a comment group that starts and ends in a
179 // line without any other tokens and that is followed by a non-comment
180 // token on the line immediately after the comment group.
182 // A line comment is a comment group that follows a non-comment
183 // token on the same line, and that has no tokens after it on the line
184 // where it ends.
186 // Lead and line comments may be considered documentation that is
187 // stored in the AST.
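//
// For example (illustrative only), given
//
//	// Doc describes f.
//	func f() {} // implementation note
//
// the group "// Doc describes f." is recorded as the lead comment and
// "// implementation note" as the line comment for the tokens on f's line.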
189 func (p *parser) next() {
190 p.leadComment = nil
191 p.lineComment = nil
192 prev := p.pos
193 p.next0()
195 if p.tok == token.COMMENT {
196 var comment *ast.CommentGroup
197 var endline int
199 if p.file.Line(p.pos) == p.file.Line(prev) {
200 // The comment is on same line as the previous token; it
201 // cannot be a lead comment but may be a line comment.
202 comment, endline = p.consumeCommentGroup(0)
203 if p.file.Line(p.pos) != endline || p.tok == token.EOF {
204 // The next token is on a different line, thus
205 // the last comment group is a line comment.
206 p.lineComment = comment
210 // consume successor comments, if any
211 endline = -1
212 for p.tok == token.COMMENT {
213 comment, endline = p.consumeCommentGroup(1)
216 if endline+1 == p.file.Line(p.pos) {
217 // The next token is following on the line immediately after the
218 // comment group, thus the last comment group is a lead comment.
219 p.leadComment = comment
224 // A bailout panic is raised to indicate early termination.
225 type bailout struct{}
227 func (p *parser) error(pos token.Pos, msg string) {
228 if p.trace {
229 defer un(trace(p, "error: "+msg))
232 epos := p.file.Position(pos)
234 // If AllErrors is not set, discard errors reported on the same line
235 // as the last recorded error and stop parsing if there are more than
236 // 10 errors.
237 if p.mode&AllErrors == 0 {
238 n := len(p.errors)
239 if n > 0 && p.errors[n-1].Pos.Line == epos.Line {
240 return // discard - likely a spurious error
242 if n > 10 {
243 panic(bailout{})
247 p.errors.Add(epos, msg)
250 func (p *parser) errorExpected(pos token.Pos, msg string) {
251 msg = "expected " + msg
252 if pos == p.pos {
253 // the error happened at the current position;
254 // make the error message more specific
255 switch {
256 case p.tok == token.SEMICOLON && p.lit == "\n":
257 msg += ", found newline"
258 case p.tok.IsLiteral():
259 // print 123 rather than 'INT', etc.
260 msg += ", found " + p.lit
261 default:
262 msg += ", found '" + p.tok.String() + "'"
265 p.error(pos, msg)
268 func (p *parser) expect(tok token.Token) token.Pos {
269 pos := p.pos
270 if p.tok != tok {
271 p.errorExpected(pos, "'"+tok.String()+"'")
273 p.next() // make progress
274 return pos
277 // expect2 is like expect, but it returns an invalid position
278 // if the expected token is not found.
279 func (p *parser) expect2(tok token.Token) (pos token.Pos) {
280 if p.tok == tok {
281 pos = p.pos
282 } else {
283 p.errorExpected(p.pos, "'"+tok.String()+"'")
285 p.next() // make progress
286 return
289 // expectClosing is like expect but provides a better error message
290 // for the common case of a missing comma before a newline.
292 func (p *parser) expectClosing(tok token.Token, context string) token.Pos {
293 if p.tok != tok && p.tok == token.SEMICOLON && p.lit == "\n" {
294 p.error(p.pos, "missing ',' before newline in "+context)
295 p.next()
297 return p.expect(tok)
300 func (p *parser) expectSemi() {
301 // semicolon is optional before a closing ')' or '}'
302 if p.tok != token.RPAREN && p.tok != token.RBRACE {
303 switch p.tok {
304 case token.COMMA:
305 // permit a ',' instead of a ';' but complain
306 p.errorExpected(p.pos, "';'")
307 fallthrough
308 case token.SEMICOLON:
309 p.next()
310 default:
311 p.errorExpected(p.pos, "';'")
312 p.advance(stmtStart)
317 func (p *parser) atComma(context string, follow token.Token) bool {
318 if p.tok == token.COMMA {
319 return true
321 if p.tok != follow {
322 msg := "missing ','"
323 if p.tok == token.SEMICOLON && p.lit == "\n" {
324 msg += " before newline"
326 p.error(p.pos, msg+" in "+context)
327 return true // "insert" comma and continue
329 return false
332 func assert(cond bool, msg string) {
333 if !cond {
334 panic("go/parser internal error: " + msg)
338 // advance consumes tokens until the current token p.tok
339 // is in the 'to' set, or token.EOF. For error recovery.
340 func (p *parser) advance(to map[token.Token]bool) {
341 for ; p.tok != token.EOF; p.next() {
342 if to[p.tok] {
343 // Return only if parser made some progress since last
344 // sync or if it has not reached 10 advance calls without
345 // progress. Otherwise consume at least one token to
346 // avoid an endless parser loop (it is possible that
347 // both parseOperand and parseStmt call advance and
348 // correctly do not advance, thus the need for the
349 // invocation limit p.syncCnt).
350 if p.pos == p.syncPos && p.syncCnt < 10 {
351 p.syncCnt++
352 return
354 if p.pos > p.syncPos {
355 p.syncPos = p.pos
356 p.syncCnt = 0
357 return
359 // Reaching here indicates a parser bug, likely an
360 // incorrect token list in this function, but it only
361 // leads to skipping of possibly correct code if a
362 // previous error is present, and thus is preferred
363 // over a non-terminating parse.
368 var stmtStart = map[token.Token]bool{
369 token.BREAK: true,
370 token.CONST: true,
371 token.CONTINUE: true,
372 token.DEFER: true,
373 token.FALLTHROUGH: true,
374 token.FOR: true,
375 token.GO: true,
376 token.GOTO: true,
377 token.IF: true,
378 token.RETURN: true,
379 token.SELECT: true,
380 token.SWITCH: true,
381 token.TYPE: true,
382 token.VAR: true,
385 var declStart = map[token.Token]bool{
386 token.CONST: true,
387 token.TYPE: true,
388 token.VAR: true,
391 var exprEnd = map[token.Token]bool{
392 token.COMMA: true,
393 token.COLON: true,
394 token.SEMICOLON: true,
395 token.RPAREN: true,
396 token.RBRACK: true,
397 token.RBRACE: true,
400 // safePos returns a valid file position for a given position: If pos
401 // is valid to begin with, safePos returns pos. If pos is out-of-range,
402 // safePos returns the EOF position.
404 // This is a hack to work around "artificial" end positions in the AST which
405 // are computed by adding 1 to (presumably valid) token positions. If the
406 // token positions are invalid due to parse errors, the resulting end position
407 // may be past the file's EOF position, which would lead to panics if used
408 // later on.
410 func (p *parser) safePos(pos token.Pos) (res token.Pos) {
411 defer func() {
412 if recover() != nil {
413 res = token.Pos(p.file.Base() + p.file.Size()) // EOF position
416 _ = p.file.Offset(pos) // trigger a panic if position is out-of-range
417 return pos
420 // ----------------------------------------------------------------------------
421 // Identifiers
423 func (p *parser) parseIdent() *ast.Ident {
424 pos := p.pos
425 name := "_"
426 if p.tok == token.IDENT {
427 name = p.lit
428 p.next()
429 } else {
430 p.expect(token.IDENT) // use expect() error handling
432 return &ast.Ident{NamePos: pos, Name: name}
435 func (p *parser) parseIdentList() (list []*ast.Ident) {
436 if p.trace {
437 defer un(trace(p, "IdentList"))
440 list = append(list, p.parseIdent())
441 for p.tok == token.COMMA {
442 p.next()
443 list = append(list, p.parseIdent())
446 return
449 // ----------------------------------------------------------------------------
450 // Common productions
452 // If lhs is set, result list elements which are identifiers are not resolved.
453 func (p *parser) parseExprList() (list []ast.Expr) {
454 if p.trace {
455 defer un(trace(p, "ExpressionList"))
458 list = append(list, p.checkExpr(p.parseExpr()))
459 for p.tok == token.COMMA {
460 p.next()
461 list = append(list, p.checkExpr(p.parseExpr()))
464 return
467 func (p *parser) parseList(inRhs bool) []ast.Expr {
468 old := p.inRhs
469 p.inRhs = inRhs
470 list := p.parseExprList()
471 p.inRhs = old
472 return list
475 // ----------------------------------------------------------------------------
476 // Types
478 func (p *parser) parseType() ast.Expr {
479 if p.trace {
480 defer un(trace(p, "Type"))
483 typ := p.tryIdentOrType()
485 if typ == nil {
486 pos := p.pos
487 p.errorExpected(pos, "type")
488 p.advance(exprEnd)
489 return &ast.BadExpr{From: pos, To: p.pos}
492 return typ
495 func (p *parser) parseQualifiedIdent(ident *ast.Ident) ast.Expr {
496 if p.trace {
497 defer un(trace(p, "QualifiedIdent"))
500 typ := p.parseTypeName(ident)
501 if p.tok == token.LBRACK && p.allowGenerics() {
502 typ = p.parseTypeInstance(typ)
505 return typ
508 // If the result is an identifier, it is not resolved.
509 func (p *parser) parseTypeName(ident *ast.Ident) ast.Expr {
510 if p.trace {
511 defer un(trace(p, "TypeName"))
514 if ident == nil {
515 ident = p.parseIdent()
518 if p.tok == token.PERIOD {
519 // ident is a package name
520 p.next()
521 sel := p.parseIdent()
522 return &ast.SelectorExpr{X: ident, Sel: sel}
525 return ident
528 // "[" has already been consumed, and lbrack is its position.
529 // If len != nil it is the already consumed array length.
530 func (p *parser) parseArrayType(lbrack token.Pos, len ast.Expr) *ast.ArrayType {
531 if p.trace {
532 defer un(trace(p, "ArrayType"))
535 if len == nil {
536 p.exprLev++
537 // always permit ellipsis for more fault-tolerant parsing
538 if p.tok == token.ELLIPSIS {
539 len = &ast.Ellipsis{Ellipsis: p.pos}
540 p.next()
541 } else if p.tok != token.RBRACK {
542 len = p.parseRhs()
544 p.exprLev--
546 if p.tok == token.COMMA {
547 // Trailing commas are accepted in type parameter
548 // lists but not in array type declarations.
549 // Accept for better error handling but complain.
550 p.error(p.pos, "unexpected comma; expecting ]")
551 p.next()
553 p.expect(token.RBRACK)
554 elt := p.parseType()
555 return &ast.ArrayType{Lbrack: lbrack, Len: len, Elt: elt}
558 func (p *parser) parseArrayFieldOrTypeInstance(x *ast.Ident) (*ast.Ident, ast.Expr) {
559 if p.trace {
560 defer un(trace(p, "ArrayFieldOrTypeInstance"))
563 // TODO(gri) Should we allow a trailing comma in a type argument
564 // list such as T[P,]? (We do in parseTypeInstance).
565 lbrack := p.expect(token.LBRACK)
566 var args []ast.Expr
567 var firstComma token.Pos
568 // TODO(rfindley): consider changing parseRhsOrType so that this function variable
569 // is not needed.
570 argparser := p.parseRhsOrType
571 if !p.allowGenerics() {
572 argparser = p.parseRhs
574 if p.tok != token.RBRACK {
575 p.exprLev++
576 args = append(args, argparser())
577 for p.tok == token.COMMA {
578 if !firstComma.IsValid() {
579 firstComma = p.pos
581 p.next()
582 args = append(args, argparser())
584 p.exprLev--
586 rbrack := p.expect(token.RBRACK)
588 if len(args) == 0 {
589 // x []E
590 elt := p.parseType()
591 return x, &ast.ArrayType{Lbrack: lbrack, Elt: elt}
594 // x [P]E or x[P]
595 if len(args) == 1 {
596 elt := p.tryIdentOrType()
597 if elt != nil {
598 // x [P]E
599 return x, &ast.ArrayType{Lbrack: lbrack, Len: args[0], Elt: elt}
601 if !p.allowGenerics() {
602 p.error(rbrack, "missing element type in array type expression")
603 return nil, &ast.BadExpr{From: args[0].Pos(), To: args[0].End()}
607 if !p.allowGenerics() {
608 p.error(firstComma, "expected ']', found ','")
609 return x, &ast.BadExpr{From: args[0].Pos(), To: args[len(args)-1].End()}
612 // x[P], x[P1, P2], ...
613 return nil, typeparams.PackIndexExpr(x, lbrack, args, rbrack)
616 func (p *parser) parseFieldDecl() *ast.Field {
617 if p.trace {
618 defer un(trace(p, "FieldDecl"))
621 doc := p.leadComment
623 var names []*ast.Ident
624 var typ ast.Expr
625 if p.tok == token.IDENT {
626 name := p.parseIdent()
627 if p.tok == token.PERIOD || p.tok == token.STRING || p.tok == token.SEMICOLON || p.tok == token.RBRACE {
628 // embedded type
629 typ = name
630 if p.tok == token.PERIOD {
631 typ = p.parseQualifiedIdent(name)
633 } else {
634 // name1, name2, ... T
635 names = []*ast.Ident{name}
636 for p.tok == token.COMMA {
637 p.next()
638 names = append(names, p.parseIdent())
640 // Careful dance: We don't know if we have an embedded instantiated
641 // type T[P1, P2, ...] or a field T of array type []E or [P]E.
642 if len(names) == 1 && p.tok == token.LBRACK {
643 name, typ = p.parseArrayFieldOrTypeInstance(name)
644 if name == nil {
645 names = nil
647 } else {
648 // T P
649 typ = p.parseType()
652 } else {
653 // embedded, possibly generic type
654 // (using the enclosing parentheses to distinguish it from a named field declaration)
655 // TODO(rFindley) confirm that this doesn't allow parenthesized embedded type
656 typ = p.parseType()
659 var tag *ast.BasicLit
660 if p.tok == token.STRING {
661 tag = &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
662 p.next()
665 p.expectSemi() // call before accessing p.linecomment
667 field := &ast.Field{Doc: doc, Names: names, Type: typ, Tag: tag, Comment: p.lineComment}
668 return field
671 func (p *parser) parseStructType() *ast.StructType {
672 if p.trace {
673 defer un(trace(p, "StructType"))
676 pos := p.expect(token.STRUCT)
677 lbrace := p.expect(token.LBRACE)
678 var list []*ast.Field
679 for p.tok == token.IDENT || p.tok == token.MUL || p.tok == token.LPAREN {
680 // a field declaration cannot start with a '(' but we accept
681 // it here for more robust parsing and better error messages
682 // (parseFieldDecl will check and complain if necessary)
683 list = append(list, p.parseFieldDecl())
685 rbrace := p.expect(token.RBRACE)
687 return &ast.StructType{
688 Struct: pos,
689 Fields: &ast.FieldList{
690 Opening: lbrace,
691 List: list,
692 Closing: rbrace,
697 func (p *parser) parsePointerType() *ast.StarExpr {
698 if p.trace {
699 defer un(trace(p, "PointerType"))
702 star := p.expect(token.MUL)
703 base := p.parseType()
705 return &ast.StarExpr{Star: star, X: base}
708 func (p *parser) parseDotsType() *ast.Ellipsis {
709 if p.trace {
710 defer un(trace(p, "DotsType"))
713 pos := p.expect(token.ELLIPSIS)
714 elt := p.parseType()
716 return &ast.Ellipsis{Ellipsis: pos, Elt: elt}
719 type field struct {
720 name *ast.Ident
721 typ ast.Expr
724 func (p *parser) parseParamDecl(name *ast.Ident, typeSetsOK bool) (f field) {
725 // TODO(rFindley) refactor to be more similar to paramDeclOrNil in the syntax
726 // package
727 if p.trace {
728 defer un(trace(p, "ParamDeclOrNil"))
731 ptok := p.tok
732 if name != nil {
733 p.tok = token.IDENT // force token.IDENT case in switch below
734 } else if typeSetsOK && p.tok == token.TILDE {
735 // "~" ...
736 return field{nil, p.embeddedElem(nil)}
739 switch p.tok {
740 case token.IDENT:
741 // name
742 if name != nil {
743 f.name = name
744 p.tok = ptok
745 } else {
746 f.name = p.parseIdent()
748 switch p.tok {
749 case token.IDENT, token.MUL, token.ARROW, token.FUNC, token.CHAN, token.MAP, token.STRUCT, token.INTERFACE, token.LPAREN:
750 // name type
751 f.typ = p.parseType()
753 case token.LBRACK:
754 // name "[" type1, ..., typeN "]" or name "[" n "]" type
755 f.name, f.typ = p.parseArrayFieldOrTypeInstance(f.name)
757 case token.ELLIPSIS:
758 // name "..." type
759 f.typ = p.parseDotsType()
760 return // don't allow ...type "|" ...
762 case token.PERIOD:
763 // name "." ...
764 f.typ = p.parseQualifiedIdent(f.name)
765 f.name = nil
767 case token.TILDE:
768 if typeSetsOK {
769 f.typ = p.embeddedElem(nil)
770 return
773 case token.OR:
774 if typeSetsOK {
775 // name "|" typeset
776 f.typ = p.embeddedElem(f.name)
777 f.name = nil
778 return
782 case token.MUL, token.ARROW, token.FUNC, token.LBRACK, token.CHAN, token.MAP, token.STRUCT, token.INTERFACE, token.LPAREN:
783 // type
784 f.typ = p.parseType()
786 case token.ELLIPSIS:
787 // "..." type
788 // (always accepted)
789 f.typ = p.parseDotsType()
790 return // don't allow ...type "|" ...
792 default:
793 // TODO(rfindley): this looks incorrect in the case of type parameter
794 // lists.
795 p.errorExpected(p.pos, ")")
796 p.advance(exprEnd)
799 // [name] type "|"
800 if typeSetsOK && p.tok == token.OR && f.typ != nil {
801 f.typ = p.embeddedElem(f.typ)
804 return
807 func (p *parser) parseParameterList(name0 *ast.Ident, typ0 ast.Expr, closing token.Token) (params []*ast.Field) {
808 if p.trace {
809 defer un(trace(p, "ParameterList"))
812 // Type parameters are the only parameter list closed by ']'.
813 tparams := closing == token.RBRACK
814 // Type set notation is ok in type parameter lists.
815 typeSetsOK := tparams && p.allowTypeSets()
817 pos := p.pos
818 if name0 != nil {
819 pos = name0.Pos()
822 var list []field
823 var named int // number of parameters that have an explicit name and type
825 for name0 != nil || p.tok != closing && p.tok != token.EOF {
826 var par field
827 if typ0 != nil {
828 if typeSetsOK {
829 typ0 = p.embeddedElem(typ0)
831 par = field{name0, typ0}
832 } else {
833 par = p.parseParamDecl(name0, typeSetsOK)
835 name0 = nil // 1st name was consumed if present
836 typ0 = nil // 1st typ was consumed if present
837 if par.name != nil || par.typ != nil {
838 list = append(list, par)
839 if par.name != nil && par.typ != nil {
840 named++
843 if !p.atComma("parameter list", closing) {
844 break
846 p.next()
849 if len(list) == 0 {
850 return // not uncommon
853 // TODO(gri) parameter distribution and conversion to []*ast.Field
854 // can be combined and made more efficient
856 // distribute parameter types
857 if named == 0 {
858 // all unnamed => found names are type names
859 for i := 0; i < len(list); i++ {
860 par := &list[i]
861 if typ := par.name; typ != nil {
862 par.typ = typ
863 par.name = nil
866 if tparams {
867 p.error(pos, "all type parameters must be named")
869 } else if named != len(list) {
870 // some named => all must be named
871 ok := true
872 var typ ast.Expr
873 missingName := pos
874 for i := len(list) - 1; i >= 0; i-- {
875 if par := &list[i]; par.typ != nil {
876 typ = par.typ
877 if par.name == nil {
878 ok = false
879 missingName = par.typ.Pos()
880 n := ast.NewIdent("_")
881 n.NamePos = typ.Pos() // correct position
882 par.name = n
884 } else if typ != nil {
885 par.typ = typ
886 } else {
887 // par.typ == nil && typ == nil => we only have a par.name
888 ok = false
889 missingName = par.name.Pos()
890 par.typ = &ast.BadExpr{From: par.name.Pos(), To: p.pos}
893 if !ok {
894 if tparams {
895 p.error(missingName, "all type parameters must be named")
896 } else {
897 p.error(pos, "mixed named and unnamed parameters")
902 // convert list to []*ast.Field
903 if named == 0 {
904 // parameter list consists of types only
905 for _, par := range list {
906 assert(par.typ != nil, "nil type in unnamed parameter list")
907 params = append(params, &ast.Field{Type: par.typ})
909 return
912 // parameter list consists of named parameters with types
913 var names []*ast.Ident
914 var typ ast.Expr
915 addParams := func() {
916 assert(typ != nil, "nil type in named parameter list")
917 field := &ast.Field{Names: names, Type: typ}
918 params = append(params, field)
919 names = nil
921 for _, par := range list {
922 if par.typ != typ {
923 if len(names) > 0 {
924 addParams()
926 typ = par.typ
928 names = append(names, par.name)
930 if len(names) > 0 {
931 addParams()
933 return
936 func (p *parser) parseParameters(acceptTParams bool) (tparams, params *ast.FieldList) {
937 if p.trace {
938 defer un(trace(p, "Parameters"))
941 if p.allowGenerics() && acceptTParams && p.tok == token.LBRACK {
942 opening := p.pos
943 p.next()
944 // [T any](params) syntax
945 list := p.parseParameterList(nil, nil, token.RBRACK)
946 rbrack := p.expect(token.RBRACK)
947 tparams = &ast.FieldList{Opening: opening, List: list, Closing: rbrack}
948 // Type parameter lists must not be empty.
949 if tparams.NumFields() == 0 {
950 p.error(tparams.Closing, "empty type parameter list")
951 tparams = nil // avoid follow-on errors
955 opening := p.expect(token.LPAREN)
957 var fields []*ast.Field
958 if p.tok != token.RPAREN {
959 fields = p.parseParameterList(nil, nil, token.RPAREN)
962 rparen := p.expect(token.RPAREN)
963 params = &ast.FieldList{Opening: opening, List: fields, Closing: rparen}
965 return
968 func (p *parser) parseResult() *ast.FieldList {
969 if p.trace {
970 defer un(trace(p, "Result"))
973 if p.tok == token.LPAREN {
974 _, results := p.parseParameters(false)
975 return results
978 typ := p.tryIdentOrType()
979 if typ != nil {
980 list := make([]*ast.Field, 1)
981 list[0] = &ast.Field{Type: typ}
982 return &ast.FieldList{List: list}
985 return nil
988 func (p *parser) parseFuncType() *ast.FuncType {
989 if p.trace {
990 defer un(trace(p, "FuncType"))
993 pos := p.expect(token.FUNC)
994 tparams, params := p.parseParameters(true)
995 if tparams != nil {
996 p.error(tparams.Pos(), "function type must have no type parameters")
998 results := p.parseResult()
1000 return &ast.FuncType{Func: pos, Params: params, Results: results}
1003 func (p *parser) parseMethodSpec() *ast.Field {
1004 if p.trace {
1005 defer un(trace(p, "MethodSpec"))
1008 doc := p.leadComment
1009 var idents []*ast.Ident
1010 var typ ast.Expr
1011 x := p.parseTypeName(nil)
1012 if ident, _ := x.(*ast.Ident); ident != nil {
1013 switch {
1014 case p.tok == token.LBRACK && p.allowGenerics():
1015 // generic method or embedded instantiated type
1016 lbrack := p.pos
1017 p.next()
1018 p.exprLev++
1019 x := p.parseExpr()
1020 p.exprLev--
1021 if name0, _ := x.(*ast.Ident); name0 != nil && p.tok != token.COMMA && p.tok != token.RBRACK {
1022 // generic method m[T any]
1024 // Interface methods do not have type parameters. We parse them for a
1025 // better error message and improved error recovery.
1026 _ = p.parseParameterList(name0, nil, token.RBRACK)
1027 _ = p.expect(token.RBRACK)
1028 p.error(lbrack, "interface method must have no type parameters")
1030 // TODO(rfindley) refactor to share code with parseFuncType.
1031 _, params := p.parseParameters(false)
1032 results := p.parseResult()
1033 idents = []*ast.Ident{ident}
1034 typ = &ast.FuncType{
1035 Func: token.NoPos,
1036 Params: params,
1037 Results: results,
1039 } else {
1040 // embedded instantiated type
1041 // TODO(rfindley) should resolve all identifiers in x.
1042 list := []ast.Expr{x}
1043 if p.atComma("type argument list", token.RBRACK) {
1044 p.exprLev++
1045 p.next()
1046 for p.tok != token.RBRACK && p.tok != token.EOF {
1047 list = append(list, p.parseType())
1048 if !p.atComma("type argument list", token.RBRACK) {
1049 break
1051 p.next()
1053 p.exprLev--
1055 rbrack := p.expectClosing(token.RBRACK, "type argument list")
1056 typ = typeparams.PackIndexExpr(ident, lbrack, list, rbrack)
1058 case p.tok == token.LPAREN:
1059 // ordinary method
1060 // TODO(rfindley) refactor to share code with parseFuncType.
1061 _, params := p.parseParameters(false)
1062 results := p.parseResult()
1063 idents = []*ast.Ident{ident}
1064 typ = &ast.FuncType{Func: token.NoPos, Params: params, Results: results}
1065 default:
1066 // embedded type
1067 typ = x
1069 } else {
1070 // embedded, possibly instantiated type
1071 typ = x
1072 if p.tok == token.LBRACK && p.allowGenerics() {
1073 // embedded instantiated interface
1074 typ = p.parseTypeInstance(typ)
1078 // Comment is added at the callsite: the field below may be joined with
1079 // additional type specs using '|'.
1080 // TODO(rfindley) this should be refactored.
1081 // TODO(rfindley) add more tests for comment handling.
1082 return &ast.Field{Doc: doc, Names: idents, Type: typ}
1085 func (p *parser) embeddedElem(x ast.Expr) ast.Expr {
1086 if p.trace {
1087 defer un(trace(p, "EmbeddedElem"))
1089 if x == nil {
1090 x = p.embeddedTerm()
1092 for p.tok == token.OR {
1093 t := new(ast.BinaryExpr)
1094 t.OpPos = p.pos
1095 t.Op = token.OR
1096 p.next()
1097 t.X = x
1098 t.Y = p.embeddedTerm()
1099 x = t
1101 return x
1104 func (p *parser) embeddedTerm() ast.Expr {
1105 if p.trace {
1106 defer un(trace(p, "EmbeddedTerm"))
1108 if p.tok == token.TILDE {
1109 t := new(ast.UnaryExpr)
1110 t.OpPos = p.pos
1111 t.Op = token.TILDE
1112 p.next()
1113 t.X = p.parseType()
1114 return t
1117 t := p.tryIdentOrType()
1118 if t == nil {
1119 pos := p.pos
1120 p.errorExpected(pos, "~ term or type")
1121 p.advance(exprEnd)
1122 return &ast.BadExpr{From: pos, To: p.pos}
1125 return t
1128 func (p *parser) parseInterfaceType() *ast.InterfaceType {
1129 if p.trace {
1130 defer un(trace(p, "InterfaceType"))
1133 pos := p.expect(token.INTERFACE)
1134 lbrace := p.expect(token.LBRACE)
1136 var list []*ast.Field
1138 parseElements:
1139 for {
1140 switch {
1141 case p.tok == token.IDENT:
1142 f := p.parseMethodSpec()
1143 if f.Names == nil && p.allowGenerics() {
1144 f.Type = p.embeddedElem(f.Type)
1146 p.expectSemi()
1147 f.Comment = p.lineComment
1148 list = append(list, f)
1149 case p.tok == token.TILDE && p.allowGenerics():
1150 typ := p.embeddedElem(nil)
1151 p.expectSemi()
1152 comment := p.lineComment
1153 list = append(list, &ast.Field{Type: typ, Comment: comment})
1154 case p.allowGenerics():
1155 if t := p.tryIdentOrType(); t != nil {
1156 typ := p.embeddedElem(t)
1157 p.expectSemi()
1158 comment := p.lineComment
1159 list = append(list, &ast.Field{Type: typ, Comment: comment})
1160 } else {
1161 break parseElements
1163 default:
1164 break parseElements
1168 // TODO(rfindley): the error produced here could be improved, since we could
1169 // accept an identifier, 'type', or a '}' at this point.
1170 rbrace := p.expect(token.RBRACE)
1172 return &ast.InterfaceType{
1173 Interface: pos,
1174 Methods: &ast.FieldList{
1175 Opening: lbrace,
1176 List: list,
1177 Closing: rbrace,
1182 func (p *parser) parseMapType() *ast.MapType {
1183 if p.trace {
1184 defer un(trace(p, "MapType"))
1187 pos := p.expect(token.MAP)
1188 p.expect(token.LBRACK)
1189 key := p.parseType()
1190 p.expect(token.RBRACK)
1191 value := p.parseType()
1193 return &ast.MapType{Map: pos, Key: key, Value: value}
1196 func (p *parser) parseChanType() *ast.ChanType {
1197 if p.trace {
1198 defer un(trace(p, "ChanType"))
1201 pos := p.pos
1202 dir := ast.SEND | ast.RECV
1203 var arrow token.Pos
1204 if p.tok == token.CHAN {
1205 p.next()
1206 if p.tok == token.ARROW {
1207 arrow = p.pos
1208 p.next()
1209 dir = ast.SEND
1211 } else {
1212 arrow = p.expect(token.ARROW)
1213 p.expect(token.CHAN)
1214 dir = ast.RECV
1216 value := p.parseType()
1218 return &ast.ChanType{Begin: pos, Arrow: arrow, Dir: dir, Value: value}
1221 func (p *parser) parseTypeInstance(typ ast.Expr) ast.Expr {
1222 assert(p.allowGenerics(), "parseTypeInstance while not parsing type params")
1223 if p.trace {
1224 defer un(trace(p, "TypeInstance"))
1227 opening := p.expect(token.LBRACK)
1228 p.exprLev++
1229 var list []ast.Expr
1230 for p.tok != token.RBRACK && p.tok != token.EOF {
1231 list = append(list, p.parseType())
1232 if !p.atComma("type argument list", token.RBRACK) {
1233 break
1235 p.next()
1237 p.exprLev--
1239 closing := p.expectClosing(token.RBRACK, "type argument list")
1241 if len(list) == 0 {
1242 p.errorExpected(closing, "type argument list")
1243 return &ast.IndexExpr{
1244 X: typ,
1245 Lbrack: opening,
1246 Index: &ast.BadExpr{From: opening + 1, To: closing},
1247 Rbrack: closing,
1251 return typeparams.PackIndexExpr(typ, opening, list, closing)
1254 func (p *parser) tryIdentOrType() ast.Expr {
1255 switch p.tok {
1256 case token.IDENT:
1257 typ := p.parseTypeName(nil)
1258 if p.tok == token.LBRACK && p.allowGenerics() {
1259 typ = p.parseTypeInstance(typ)
1261 return typ
1262 case token.LBRACK:
1263 lbrack := p.expect(token.LBRACK)
1264 return p.parseArrayType(lbrack, nil)
1265 case token.STRUCT:
1266 return p.parseStructType()
1267 case token.MUL:
1268 return p.parsePointerType()
1269 case token.FUNC:
1270 typ := p.parseFuncType()
1271 return typ
1272 case token.INTERFACE:
1273 return p.parseInterfaceType()
1274 case token.MAP:
1275 return p.parseMapType()
1276 case token.CHAN, token.ARROW:
1277 return p.parseChanType()
1278 case token.LPAREN:
1279 lparen := p.pos
1280 p.next()
1281 typ := p.parseType()
1282 rparen := p.expect(token.RPAREN)
1283 return &ast.ParenExpr{Lparen: lparen, X: typ, Rparen: rparen}
1286 // no type found
1287 return nil
1290 // ----------------------------------------------------------------------------
1291 // Blocks
1293 func (p *parser) parseStmtList() (list []ast.Stmt) {
1294 if p.trace {
1295 defer un(trace(p, "StatementList"))
1298 for p.tok != token.CASE && p.tok != token.DEFAULT && p.tok != token.RBRACE && p.tok != token.EOF {
1299 list = append(list, p.parseStmt())
1302 return
1305 func (p *parser) parseBody() *ast.BlockStmt {
1306 if p.trace {
1307 defer un(trace(p, "Body"))
1310 lbrace := p.expect(token.LBRACE)
1311 list := p.parseStmtList()
1312 rbrace := p.expect2(token.RBRACE)
1314 return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
1317 func (p *parser) parseBlockStmt() *ast.BlockStmt {
1318 if p.trace {
1319 defer un(trace(p, "BlockStmt"))
1322 lbrace := p.expect(token.LBRACE)
1323 list := p.parseStmtList()
1324 rbrace := p.expect2(token.RBRACE)
1326 return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
1329 // ----------------------------------------------------------------------------
1330 // Expressions
1332 func (p *parser) parseFuncTypeOrLit() ast.Expr {
1333 if p.trace {
1334 defer un(trace(p, "FuncTypeOrLit"))
1337 typ := p.parseFuncType()
1338 if p.tok != token.LBRACE {
1339 // function type only
1340 return typ
1343 p.exprLev++
1344 body := p.parseBody()
1345 p.exprLev--
1347 return &ast.FuncLit{Type: typ, Body: body}
1350 // parseOperand may return an expression or a raw type (incl. array
1351 // types of the form [...]T). Callers must verify the result.
1353 func (p *parser) parseOperand() ast.Expr {
1354 if p.trace {
1355 defer un(trace(p, "Operand"))
1358 switch p.tok {
1359 case token.IDENT:
1360 x := p.parseIdent()
1361 return x
1363 case token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING:
1364 x := &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
1365 p.next()
1366 return x
1368 case token.LPAREN:
1369 lparen := p.pos
1370 p.next()
1371 p.exprLev++
1372 x := p.parseRhsOrType() // types may be parenthesized: (some type)
1373 p.exprLev--
1374 rparen := p.expect(token.RPAREN)
1375 return &ast.ParenExpr{Lparen: lparen, X: x, Rparen: rparen}
1377 case token.FUNC:
1378 return p.parseFuncTypeOrLit()
1381 if typ := p.tryIdentOrType(); typ != nil { // do not consume trailing type parameters
1382 // could be type for composite literal or conversion
1383 _, isIdent := typ.(*ast.Ident)
1384 assert(!isIdent, "type cannot be identifier")
1385 return typ
1388 // we have an error
1389 pos := p.pos
1390 p.errorExpected(pos, "operand")
1391 p.advance(stmtStart)
1392 return &ast.BadExpr{From: pos, To: p.pos}
1395 func (p *parser) parseSelector(x ast.Expr) ast.Expr {
1396 if p.trace {
1397 defer un(trace(p, "Selector"))
1400 sel := p.parseIdent()
1402 return &ast.SelectorExpr{X: x, Sel: sel}
1405 func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr {
1406 if p.trace {
1407 defer un(trace(p, "TypeAssertion"))
1410 lparen := p.expect(token.LPAREN)
1411 var typ ast.Expr
1412 if p.tok == token.TYPE {
1413 // type switch: typ == nil
1414 p.next()
1415 } else {
1416 typ = p.parseType()
1418 rparen := p.expect(token.RPAREN)
1420 return &ast.TypeAssertExpr{X: x, Type: typ, Lparen: lparen, Rparen: rparen}
1423 func (p *parser) parseIndexOrSliceOrInstance(x ast.Expr) ast.Expr {
1424 if p.trace {
1425 defer un(trace(p, "parseIndexOrSliceOrInstance"))
1428 lbrack := p.expect(token.LBRACK)
1429 if p.tok == token.RBRACK {
1430 // empty index, slice or index expressions are not permitted;
1431 // accept them for parsing tolerance, but complain
1432 p.errorExpected(p.pos, "operand")
1433 rbrack := p.pos
1434 p.next()
1435 return &ast.IndexExpr{
1436 X: x,
1437 Lbrack: lbrack,
1438 Index: &ast.BadExpr{From: rbrack, To: rbrack},
1439 Rbrack: rbrack,
1442 p.exprLev++
1444 const N = 3 // change the 3 to 2 to disable 3-index slices
1445 var args []ast.Expr
1446 var index [N]ast.Expr
1447 var colons [N - 1]token.Pos
1448 var firstComma token.Pos
1449 if p.tok != token.COLON {
1450 // We can't know if we have an index expression or a type instantiation;
1451 // so even if we see a (named) type we are not going to be in type context.
1452 index[0] = p.parseRhsOrType()
1454 ncolons := 0
1455 switch p.tok {
1456 case token.COLON:
1457 // slice expression
1458 for p.tok == token.COLON && ncolons < len(colons) {
1459 colons[ncolons] = p.pos
1460 ncolons++
1461 p.next()
1462 if p.tok != token.COLON && p.tok != token.RBRACK && p.tok != token.EOF {
1463 index[ncolons] = p.parseRhs()
1466 case token.COMMA:
1467 firstComma = p.pos
1468 // instance expression
1469 args = append(args, index[0])
1470 for p.tok == token.COMMA {
1471 p.next()
1472 if p.tok != token.RBRACK && p.tok != token.EOF {
1473 args = append(args, p.parseType())
1478 p.exprLev--
1479 rbrack := p.expect(token.RBRACK)
1481 if ncolons > 0 {
1482 // slice expression
1483 slice3 := false
1484 if ncolons == 2 {
1485 slice3 = true
1486 // Check presence of 2nd and 3rd index here rather than during type-checking
1487 // to prevent erroneous programs from passing through gofmt (was issue 7305).
1488 if index[1] == nil {
1489 p.error(colons[0], "2nd index required in 3-index slice")
1490 index[1] = &ast.BadExpr{From: colons[0] + 1, To: colons[1]}
1492 if index[2] == nil {
1493 p.error(colons[1], "3rd index required in 3-index slice")
1494 index[2] = &ast.BadExpr{From: colons[1] + 1, To: rbrack}
1497 return &ast.SliceExpr{X: x, Lbrack: lbrack, Low: index[0], High: index[1], Max: index[2], Slice3: slice3, Rbrack: rbrack}
1500 if len(args) == 0 {
1501 // index expression
1502 return &ast.IndexExpr{X: x, Lbrack: lbrack, Index: index[0], Rbrack: rbrack}
1505 if !p.allowGenerics() {
1506 p.error(firstComma, "expected ']' or ':', found ','")
1507 return &ast.BadExpr{From: args[0].Pos(), To: args[len(args)-1].End()}
1510 // instance expression
1511 return typeparams.PackIndexExpr(x, lbrack, args, rbrack)
1514 func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr {
1515 if p.trace {
1516 defer un(trace(p, "CallOrConversion"))
1519 lparen := p.expect(token.LPAREN)
1520 p.exprLev++
1521 var list []ast.Expr
1522 var ellipsis token.Pos
1523 for p.tok != token.RPAREN && p.tok != token.EOF && !ellipsis.IsValid() {
1524 list = append(list, p.parseRhsOrType()) // builtins may expect a type: make(some type, ...)
1525 if p.tok == token.ELLIPSIS {
1526 ellipsis = p.pos
1527 p.next()
1529 if !p.atComma("argument list", token.RPAREN) {
1530 break
1532 p.next()
1534 p.exprLev--
1535 rparen := p.expectClosing(token.RPAREN, "argument list")
1537 return &ast.CallExpr{Fun: fun, Lparen: lparen, Args: list, Ellipsis: ellipsis, Rparen: rparen}
1540 func (p *parser) parseValue() ast.Expr {
1541 if p.trace {
1542 defer un(trace(p, "Element"))
1545 if p.tok == token.LBRACE {
1546 return p.parseLiteralValue(nil)
1549 x := p.checkExpr(p.parseExpr())
1551 return x
1554 func (p *parser) parseElement() ast.Expr {
1555 if p.trace {
1556 defer un(trace(p, "Element"))
1559 x := p.parseValue()
1560 if p.tok == token.COLON {
1561 colon := p.pos
1562 p.next()
1563 x = &ast.KeyValueExpr{Key: x, Colon: colon, Value: p.parseValue()}
1566 return x
1569 func (p *parser) parseElementList() (list []ast.Expr) {
1570 if p.trace {
1571 defer un(trace(p, "ElementList"))
1574 for p.tok != token.RBRACE && p.tok != token.EOF {
1575 list = append(list, p.parseElement())
1576 if !p.atComma("composite literal", token.RBRACE) {
1577 break
1579 p.next()
1582 return
1585 func (p *parser) parseLiteralValue(typ ast.Expr) ast.Expr {
1586 if p.trace {
1587 defer un(trace(p, "LiteralValue"))
1590 lbrace := p.expect(token.LBRACE)
1591 var elts []ast.Expr
1592 p.exprLev++
1593 if p.tok != token.RBRACE {
1594 elts = p.parseElementList()
1596 p.exprLev--
1597 rbrace := p.expectClosing(token.RBRACE, "composite literal")
1598 return &ast.CompositeLit{Type: typ, Lbrace: lbrace, Elts: elts, Rbrace: rbrace}
1601 // checkExpr checks that x is an expression (and not a type).
1602 func (p *parser) checkExpr(x ast.Expr) ast.Expr {
1603 switch unparen(x).(type) {
1604 case *ast.BadExpr:
1605 case *ast.Ident:
1606 case *ast.BasicLit:
1607 case *ast.FuncLit:
1608 case *ast.CompositeLit:
1609 case *ast.ParenExpr:
1610 panic("unreachable")
1611 case *ast.SelectorExpr:
1612 case *ast.IndexExpr:
1613 case *ast.IndexListExpr:
1614 case *ast.SliceExpr:
1615 case *ast.TypeAssertExpr:
1616 // If t.Type == nil we have a type assertion of the form
1617 // y.(type), which is only allowed in type switch expressions.
1618 // It's hard to exclude those but for the case where we are in
1619 // a type switch. Instead be lenient and test this in the type
1620 // checker.
1621 case *ast.CallExpr:
1622 case *ast.StarExpr:
1623 case *ast.UnaryExpr:
1624 case *ast.BinaryExpr:
1625 default:
1626 // all other nodes are not proper expressions
1627 p.errorExpected(x.Pos(), "expression")
1628 x = &ast.BadExpr{From: x.Pos(), To: p.safePos(x.End())}
1630 return x
1633 // If x is of the form (T), unparen returns unparen(T), otherwise it returns x.
1634 func unparen(x ast.Expr) ast.Expr {
1635 if p, isParen := x.(*ast.ParenExpr); isParen {
1636 x = unparen(p.X)
1638 return x
1641 // checkExprOrType checks that x is an expression or a type
1642 // (and not a raw type such as [...]T).
1644 func (p *parser) checkExprOrType(x ast.Expr) ast.Expr {
1645 switch t := unparen(x).(type) {
1646 case *ast.ParenExpr:
1647 panic("unreachable")
1648 case *ast.ArrayType:
1649 if len, isEllipsis := t.Len.(*ast.Ellipsis); isEllipsis {
1650 p.error(len.Pos(), "expected array length, found '...'")
1651 x = &ast.BadExpr{From: x.Pos(), To: p.safePos(x.End())}
1655 // all other nodes are expressions or types
1656 return x
1659 func (p *parser) parsePrimaryExpr(x ast.Expr) ast.Expr {
1660 if p.trace {
1661 defer un(trace(p, "PrimaryExpr"))
1664 if x == nil {
1665 x = p.parseOperand()
1667 for {
1668 switch p.tok {
1669 case token.PERIOD:
1670 p.next()
1671 switch p.tok {
1672 case token.IDENT:
1673 x = p.parseSelector(p.checkExprOrType(x))
1674 case token.LPAREN:
1675 x = p.parseTypeAssertion(p.checkExpr(x))
1676 default:
1677 pos := p.pos
1678 p.errorExpected(pos, "selector or type assertion")
1679 // TODO(rFindley) The check for token.RBRACE below is a targeted fix
1680 // to error recovery sufficient to make the x/tools tests to
1681 // pass with the new parsing logic introduced for type
1682 // parameters. Remove this once error recovery has been
1683 // more generally reconsidered.
1684 if p.tok != token.RBRACE {
1685 p.next() // make progress
1687 sel := &ast.Ident{NamePos: pos, Name: "_"}
1688 x = &ast.SelectorExpr{X: x, Sel: sel}
1690 case token.LBRACK:
1691 x = p.parseIndexOrSliceOrInstance(p.checkExpr(x))
1692 case token.LPAREN:
1693 x = p.parseCallOrConversion(p.checkExprOrType(x))
1694 case token.LBRACE:
1695 // operand may have returned a parenthesized complit
1696 // type; accept it but complain if we have a complit
1697 t := unparen(x)
1698 // determine if '{' belongs to a composite literal or a block statement
1699 switch t.(type) {
1700 case *ast.BadExpr, *ast.Ident, *ast.SelectorExpr:
1701 if p.exprLev < 0 {
1702 return x
1704 // x is possibly a composite literal type
1705 case *ast.IndexExpr, *ast.IndexListExpr:
1706 if p.exprLev < 0 {
1707 return x
1709 // x is possibly a composite literal type
1710 case *ast.ArrayType, *ast.StructType, *ast.MapType:
1711 // x is a composite literal type
1712 default:
1713 return x
1715 if t != x {
1716 p.error(t.Pos(), "cannot parenthesize type in composite literal")
1717 // already progressed, no need to advance
1719 x = p.parseLiteralValue(x)
1720 default:
1721 return x
1726 func (p *parser) parseUnaryExpr() ast.Expr {
1727 if p.trace {
1728 defer un(trace(p, "UnaryExpr"))
1731 switch p.tok {
1732 case token.ADD, token.SUB, token.NOT, token.XOR, token.AND:
1733 pos, op := p.pos, p.tok
1734 p.next()
1735 x := p.parseUnaryExpr()
1736 return &ast.UnaryExpr{OpPos: pos, Op: op, X: p.checkExpr(x)}
1738 case token.ARROW:
1739 // channel type or receive expression
1740 arrow := p.pos
1741 p.next()
1743 // If the next token is token.CHAN we still don't know if it
1744 // is a channel type or a receive operation - we only know
1745 // once we have found the end of the unary expression. There
1746 // are two cases:
1748 // <- type => (<-type) must be channel type
1749 // <- expr => <-(expr) is a receive from an expression
1751 // In the first case, the arrow must be re-associated with
1752 // the channel type parsed already:
1754 // <- (chan type) => (<-chan type)
1755 // <- (chan<- type) => (<-chan (<-type))
1757 x := p.parseUnaryExpr()
1759 // determine which case we have
1760 if typ, ok := x.(*ast.ChanType); ok {
1761 // (<-type)
1763 // re-associate position info and <-
1764 dir := ast.SEND
1765 for ok && dir == ast.SEND {
1766 if typ.Dir == ast.RECV {
1767 // error: (<-type) is (<-(<-chan T))
1768 p.errorExpected(typ.Arrow, "'chan'")
1770 arrow, typ.Begin, typ.Arrow = typ.Arrow, arrow, arrow
1771 dir, typ.Dir = typ.Dir, ast.RECV
1772 typ, ok = typ.Value.(*ast.ChanType)
1774 if dir == ast.SEND {
1775 p.errorExpected(arrow, "channel type")
1778 return x
1781 // <-(expr)
1782 return &ast.UnaryExpr{OpPos: arrow, Op: token.ARROW, X: p.checkExpr(x)}
1784 case token.MUL:
1785 // pointer type or unary "*" expression
1786 pos := p.pos
1787 p.next()
1788 x := p.parseUnaryExpr()
1789 return &ast.StarExpr{Star: pos, X: p.checkExprOrType(x)}
1792 return p.parsePrimaryExpr(nil)
1795 func (p *parser) tokPrec() (token.Token, int) {
1796 tok := p.tok
1797 if p.inRhs && tok == token.ASSIGN {
1798 tok = token.EQL
1800 return tok, tok.Precedence()
1803 // parseBinaryExpr parses a (possibly) binary expression.
1804 // If x is non-nil, it is used as the left operand.
1805 // If check is true, operands are checked to be valid expressions.
1807 // TODO(rfindley): parseBinaryExpr has become overloaded. Consider refactoring.
1808 func (p *parser) parseBinaryExpr(x ast.Expr, prec1 int, check bool) ast.Expr {
1809 if p.trace {
1810 defer un(trace(p, "BinaryExpr"))
1813 if x == nil {
1814 x = p.parseUnaryExpr()
1816 for {
1817 op, oprec := p.tokPrec()
1818 if oprec < prec1 {
1819 return x
1821 pos := p.expect(op)
1822 y := p.parseBinaryExpr(nil, oprec+1, check)
1823 if check {
1824 x = p.checkExpr(x)
1825 y = p.checkExpr(y)
1827 x = &ast.BinaryExpr{X: x, OpPos: pos, Op: op, Y: y}
1831 // checkBinaryExpr checks binary expressions that were not already checked by
1832 // parseBinaryExpr, because the latter was called with check=false.
1833 func (p *parser) checkBinaryExpr(x ast.Expr) {
1834 bx, ok := x.(*ast.BinaryExpr)
1835 if !ok {
1836 return
1839 bx.X = p.checkExpr(bx.X)
1840 bx.Y = p.checkExpr(bx.Y)
1842 // parseBinaryExpr checks x and y for each binary expr in a tree, so we
1843 // traverse the tree of binary exprs starting from x.
1844 p.checkBinaryExpr(bx.X)
1845 p.checkBinaryExpr(bx.Y)
1848 // The result may be a type or even a raw type ([...]int). Callers must
1849 // check the result (using checkExpr or checkExprOrType), depending on
1850 // context.
1851 func (p *parser) parseExpr() ast.Expr {
1852 if p.trace {
1853 defer un(trace(p, "Expression"))
1856 return p.parseBinaryExpr(nil, token.LowestPrec+1, true)
1859 func (p *parser) parseRhs() ast.Expr {
1860 old := p.inRhs
1861 p.inRhs = true
1862 x := p.checkExpr(p.parseExpr())
1863 p.inRhs = old
1864 return x
1867 func (p *parser) parseRhsOrType() ast.Expr {
1868 old := p.inRhs
1869 p.inRhs = true
1870 x := p.checkExprOrType(p.parseExpr())
1871 p.inRhs = old
1872 return x
1875 // ----------------------------------------------------------------------------
1876 // Statements
1878 // Parsing modes for parseSimpleStmt.
1879 const (
1880 basic = iota
1881 labelOk
1882 rangeOk
1885 // parseSimpleStmt returns true as 2nd result if it parsed the assignment
1886 // of a range clause (with mode == rangeOk). The returned statement is an
1887 // assignment with a right-hand side that is a single unary expression of
1888 // the form "range x". No guarantees are given for the left-hand side.
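//
// For example (illustrative only), for the range clause in
//
//	for i, v := range s { ... }
//
// parseSimpleStmt(rangeOk) returns an *ast.AssignStmt with Tok token.DEFINE
// whose Rhs holds the single expression "range s" (an *ast.UnaryExpr with
// Op token.RANGE), together with true as the second result.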
1889 func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) {
1890 if p.trace {
1891 defer un(trace(p, "SimpleStmt"))
1894 x := p.parseList(false)
1896 switch p.tok {
1897 case
1898 token.DEFINE, token.ASSIGN, token.ADD_ASSIGN,
1899 token.SUB_ASSIGN, token.MUL_ASSIGN, token.QUO_ASSIGN,
1900 token.REM_ASSIGN, token.AND_ASSIGN, token.OR_ASSIGN,
1901 token.XOR_ASSIGN, token.SHL_ASSIGN, token.SHR_ASSIGN, token.AND_NOT_ASSIGN:
1902 // assignment statement, possibly part of a range clause
1903 pos, tok := p.pos, p.tok
1904 p.next()
1905 var y []ast.Expr
1906 isRange := false
1907 if mode == rangeOk && p.tok == token.RANGE && (tok == token.DEFINE || tok == token.ASSIGN) {
1908 pos := p.pos
1909 p.next()
1910 y = []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
1911 isRange = true
1912 } else {
1913 y = p.parseList(true)
1915 as := &ast.AssignStmt{Lhs: x, TokPos: pos, Tok: tok, Rhs: y}
1916 if tok == token.DEFINE {
1917 p.checkAssignStmt(as)
1919 return as, isRange
1922 if len(x) > 1 {
1923 p.errorExpected(x[0].Pos(), "1 expression")
1924 // continue with first expression
1927 switch p.tok {
1928 case token.COLON:
1929 // labeled statement
1930 colon := p.pos
1931 p.next()
1932 if label, isIdent := x[0].(*ast.Ident); mode == labelOk && isIdent {
1933 // Go spec: The scope of a label is the body of the function
1934 // in which it is declared and excludes the body of any nested
1935 // function.
1936 stmt := &ast.LabeledStmt{Label: label, Colon: colon, Stmt: p.parseStmt()}
1937 return stmt, false
1939 // The label declaration typically starts at x[0].Pos(), but the label
1940 // declaration may be erroneous due to a token after that position (and
1941 // before the ':'). If SpuriousErrors is not set, the (only) error
1942 // reported for the line is the illegal label error instead of the token
1943 // before the ':' that caused the problem. Thus, use the (latest) colon
1944 // position for error reporting.
1945 p.error(colon, "illegal label declaration")
1946 return &ast.BadStmt{From: x[0].Pos(), To: colon + 1}, false
1948 case token.ARROW:
1949 // send statement
1950 arrow := p.pos
1951 p.next()
1952 y := p.parseRhs()
1953 return &ast.SendStmt{Chan: x[0], Arrow: arrow, Value: y}, false
1955 case token.INC, token.DEC:
1956 // increment or decrement
1957 s := &ast.IncDecStmt{X: x[0], TokPos: p.pos, Tok: p.tok}
1958 p.next()
1959 return s, false
1962 // expression
1963 return &ast.ExprStmt{X: x[0]}, false
1966 func (p *parser) checkAssignStmt(as *ast.AssignStmt) {
1967 for _, x := range as.Lhs {
1968 if _, isIdent := x.(*ast.Ident); !isIdent {
1969 p.errorExpected(x.Pos(), "identifier on left side of :=")
1974 func (p *parser) parseCallExpr(callType string) *ast.CallExpr {
1975 x := p.parseRhsOrType() // could be a conversion: (some type)(x)
1976 if call, isCall := x.(*ast.CallExpr); isCall {
1977 return call
1979 if _, isBad := x.(*ast.BadExpr); !isBad {
1980 // only report error if it's a new one
1981 p.error(p.safePos(x.End()), fmt.Sprintf("function must be invoked in %s statement", callType))
1983 return nil
1986 func (p *parser) parseGoStmt() ast.Stmt {
1987 if p.trace {
1988 defer un(trace(p, "GoStmt"))
1991 pos := p.expect(token.GO)
1992 call := p.parseCallExpr("go")
1993 p.expectSemi()
1994 if call == nil {
1995 return &ast.BadStmt{From: pos, To: pos + 2} // len("go")
1998 return &ast.GoStmt{Go: pos, Call: call}
2001 func (p *parser) parseDeferStmt() ast.Stmt {
2002 if p.trace {
2003 defer un(trace(p, "DeferStmt"))
2006 pos := p.expect(token.DEFER)
2007 call := p.parseCallExpr("defer")
2008 p.expectSemi()
2009 if call == nil {
2010 return &ast.BadStmt{From: pos, To: pos + 5} // len("defer")
2013 return &ast.DeferStmt{Defer: pos, Call: call}
2016 func (p *parser) parseReturnStmt() *ast.ReturnStmt {
2017 if p.trace {
2018 defer un(trace(p, "ReturnStmt"))
2021 pos := p.pos
2022 p.expect(token.RETURN)
2023 var x []ast.Expr
2024 if p.tok != token.SEMICOLON && p.tok != token.RBRACE {
2025 x = p.parseList(true)
2027 p.expectSemi()
2029 return &ast.ReturnStmt{Return: pos, Results: x}
2032 func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt {
2033 if p.trace {
2034 defer un(trace(p, "BranchStmt"))
2037 pos := p.expect(tok)
2038 var label *ast.Ident
2039 if tok != token.FALLTHROUGH && p.tok == token.IDENT {
2040 label = p.parseIdent()
2042 p.expectSemi()
2044 return &ast.BranchStmt{TokPos: pos, Tok: tok, Label: label}
2047 func (p *parser) makeExpr(s ast.Stmt, want string) ast.Expr {
2048 if s == nil {
2049 return nil
2051 if es, isExpr := s.(*ast.ExprStmt); isExpr {
2052 return p.checkExpr(es.X)
2054 found := "simple statement"
2055 if _, isAss := s.(*ast.AssignStmt); isAss {
2056 found = "assignment"
2058 p.error(s.Pos(), fmt.Sprintf("expected %s, found %s (missing parentheses around composite literal?)", want, found))
2059 return &ast.BadExpr{From: s.Pos(), To: p.safePos(s.End())}
2062 // parseIfHeader is an adjusted version of parser.header
2063 // in cmd/compile/internal/syntax/parser.go, which has
2064 // been tuned for better error handling.
2065 func (p *parser) parseIfHeader() (init ast.Stmt, cond ast.Expr) {
2066 if p.tok == token.LBRACE {
2067 p.error(p.pos, "missing condition in if statement")
2068 cond = &ast.BadExpr{From: p.pos, To: p.pos}
2069 return
2071 // p.tok != token.LBRACE
2073 prevLev := p.exprLev
2074 p.exprLev = -1
2076 if p.tok != token.SEMICOLON {
2077 // accept potential variable declaration but complain
2078 if p.tok == token.VAR {
2079 p.next()
2080 p.error(p.pos, "var declaration not allowed in 'IF' initializer")
2082 init, _ = p.parseSimpleStmt(basic)
2085 var condStmt ast.Stmt
2086 var semi struct {
2087 pos token.Pos
2088 lit string // ";" or "\n"; valid if pos.IsValid()
2090 if p.tok != token.LBRACE {
2091 if p.tok == token.SEMICOLON {
2092 semi.pos = p.pos
2093 semi.lit = p.lit
2094 p.next()
2095 } else {
2096 p.expect(token.SEMICOLON)
2098 if p.tok != token.LBRACE {
2099 condStmt, _ = p.parseSimpleStmt(basic)
2101 } else {
2102 condStmt = init
2103 init = nil
2106 if condStmt != nil {
2107 cond = p.makeExpr(condStmt, "boolean expression")
2108 } else if semi.pos.IsValid() {
2109 if semi.lit == "\n" {
2110 p.error(semi.pos, "unexpected newline, expecting { after if clause")
2111 } else {
2112 p.error(semi.pos, "missing condition in if statement")
2116 // make sure we have a valid AST
2117 if cond == nil {
2118 cond = &ast.BadExpr{From: p.pos, To: p.pos}
2121 p.exprLev = prevLev
2122 return
2125 func (p *parser) parseIfStmt() *ast.IfStmt {
2126 if p.trace {
2127 defer un(trace(p, "IfStmt"))
2130 pos := p.expect(token.IF)
2132 init, cond := p.parseIfHeader()
2133 body := p.parseBlockStmt()
2135 var else_ ast.Stmt
2136 if p.tok == token.ELSE {
2137 p.next()
2138 switch p.tok {
2139 case token.IF:
2140 else_ = p.parseIfStmt()
2141 case token.LBRACE:
2142 else_ = p.parseBlockStmt()
2143 p.expectSemi()
2144 default:
2145 p.errorExpected(p.pos, "if statement or block")
2146 else_ = &ast.BadStmt{From: p.pos, To: p.pos}
2148 } else {
2149 p.expectSemi()
2152 return &ast.IfStmt{If: pos, Init: init, Cond: cond, Body: body, Else: else_}
2155 func (p *parser) parseTypeList() (list []ast.Expr) {
2156 if p.trace {
2157 defer un(trace(p, "TypeList"))
2160 list = append(list, p.parseType())
2161 for p.tok == token.COMMA {
2162 p.next()
2163 list = append(list, p.parseType())
2166 return
2169 func (p *parser) parseCaseClause(typeSwitch bool) *ast.CaseClause {
2170 if p.trace {
2171 defer un(trace(p, "CaseClause"))
2174 pos := p.pos
2175 var list []ast.Expr
2176 if p.tok == token.CASE {
2177 p.next()
2178 if typeSwitch {
2179 list = p.parseTypeList()
2180 } else {
2181 list = p.parseList(true)
2183 } else {
2184 p.expect(token.DEFAULT)
2187 colon := p.expect(token.COLON)
2188 body := p.parseStmtList()
2190 return &ast.CaseClause{Case: pos, List: list, Colon: colon, Body: body}
2193 func isTypeSwitchAssert(x ast.Expr) bool {
2194 a, ok := x.(*ast.TypeAssertExpr)
2195 return ok && a.Type == nil
2198 func (p *parser) isTypeSwitchGuard(s ast.Stmt) bool {
2199 switch t := s.(type) {
2200 case *ast.ExprStmt:
2201 // x.(type)
2202 return isTypeSwitchAssert(t.X)
2203 case *ast.AssignStmt:
2204 // v := x.(type)
2205 if len(t.Lhs) == 1 && len(t.Rhs) == 1 && isTypeSwitchAssert(t.Rhs[0]) {
2206 switch t.Tok {
2207 case token.ASSIGN:
2208 // permit v = x.(type) but complain
2209 p.error(t.TokPos, "expected ':=', found '='")
2210 fallthrough
2211 case token.DEFINE:
2212 return true
2216 return false
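// NOTE (editorial sketch, not part of the original source): statements
// recognized as type switch guards, with hypothetical v and x:
//
//    switch x.(type) {}         // ExprStmt guard
//    switch v := x.(type) {}    // AssignStmt guard with ':='
//    switch v = x.(type) {}     // accepted, but "expected ':=', found '='"
//    switch v := x.(int) {}     // not a guard: the assertion names a concrete type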
2219 func (p *parser) parseSwitchStmt() ast.Stmt {
2220 if p.trace {
2221 defer un(trace(p, "SwitchStmt"))
2224 pos := p.expect(token.SWITCH)
2226 var s1, s2 ast.Stmt
2227 if p.tok != token.LBRACE {
2228 prevLev := p.exprLev
2229 p.exprLev = -1
2230 if p.tok != token.SEMICOLON {
2231 s2, _ = p.parseSimpleStmt(basic)
2233 if p.tok == token.SEMICOLON {
2234 p.next()
2235 s1 = s2
2236 s2 = nil
2237 if p.tok != token.LBRACE {
2238 // A TypeSwitchGuard may declare a variable in addition
2239 // to the variable declared in the initial SimpleStmt.
2240 // Introduce extra scope to avoid redeclaration errors:
2242 // switch t := 0; t := x.(T) { ... }
2244 // (this code is not valid Go because the first t
2245 // cannot be accessed and thus is never used, the extra
2246 // scope is needed for the correct error message).
2248 // If we don't have a type switch, s2 must be an expression.
2249 // Having the extra nested but empty scope won't affect it.
2250 s2, _ = p.parseSimpleStmt(basic)
2253 p.exprLev = prevLev
2256 typeSwitch := p.isTypeSwitchGuard(s2)
2257 lbrace := p.expect(token.LBRACE)
2258 var list []ast.Stmt
2259 for p.tok == token.CASE || p.tok == token.DEFAULT {
2260 list = append(list, p.parseCaseClause(typeSwitch))
2262 rbrace := p.expect(token.RBRACE)
2263 p.expectSemi()
2264 body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
2266 if typeSwitch {
2267 return &ast.TypeSwitchStmt{Switch: pos, Init: s1, Assign: s2, Body: body}
2270 return &ast.SwitchStmt{Switch: pos, Init: s1, Tag: p.makeExpr(s2, "switch expression"), Body: body}
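// NOTE (editorial sketch, not part of the original source): header shapes
// handled above, with hypothetical f, x, and T:
//
//    switch {}                      // no init, no tag
//    switch x {}                    // tag only (s2 becomes the tag)
//    switch x := f(); x {}          // init statement, then tag
//    switch t := x.(type) {}        // type switch guard -> TypeSwitchStmt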
2273 func (p *parser) parseCommClause() *ast.CommClause {
2274 if p.trace {
2275 defer un(trace(p, "CommClause"))
2278 pos := p.pos
2279 var comm ast.Stmt
2280 if p.tok == token.CASE {
2281 p.next()
2282 lhs := p.parseList(false)
2283 if p.tok == token.ARROW {
2284 // SendStmt
2285 if len(lhs) > 1 {
2286 p.errorExpected(lhs[0].Pos(), "1 expression")
2287 // continue with first expression
2289 arrow := p.pos
2290 p.next()
2291 rhs := p.parseRhs()
2292 comm = &ast.SendStmt{Chan: lhs[0], Arrow: arrow, Value: rhs}
2293 } else {
2294 // RecvStmt
2295 if tok := p.tok; tok == token.ASSIGN || tok == token.DEFINE {
2296 // RecvStmt with assignment
2297 if len(lhs) > 2 {
2298 p.errorExpected(lhs[0].Pos(), "1 or 2 expressions")
2299 // continue with first two expressions
2300 lhs = lhs[0:2]
2302 pos := p.pos
2303 p.next()
2304 rhs := p.parseRhs()
2305 as := &ast.AssignStmt{Lhs: lhs, TokPos: pos, Tok: tok, Rhs: []ast.Expr{rhs}}
2306 if tok == token.DEFINE {
2307 p.checkAssignStmt(as)
2309 comm = as
2310 } else {
2311 // lhs must be single receive operation
2312 if len(lhs) > 1 {
2313 p.errorExpected(lhs[0].Pos(), "1 expression")
2314 // continue with first expression
2316 comm = &ast.ExprStmt{X: lhs[0]}
2319 } else {
2320 p.expect(token.DEFAULT)
2323 colon := p.expect(token.COLON)
2324 body := p.parseStmtList()
2326 return &ast.CommClause{Case: pos, Comm: comm, Colon: colon, Body: body}
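// NOTE (editorial sketch, not part of the original source): comm clauses
// recognized above, with hypothetical ch, v, x, and ok:
//
//    select {
//    case ch <- v:        // SendStmt (single channel expression on the left)
//    case x := <-ch:      // RecvStmt with ':=' (checked by checkAssignStmt)
//    case x, ok = <-ch:   // RecvStmt with '=' and at most two left-hand sides
//    default:             // empty comm
//    }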
2329 func (p *parser) parseSelectStmt() *ast.SelectStmt {
2330 if p.trace {
2331 defer un(trace(p, "SelectStmt"))
2334 pos := p.expect(token.SELECT)
2335 lbrace := p.expect(token.LBRACE)
2336 var list []ast.Stmt
2337 for p.tok == token.CASE || p.tok == token.DEFAULT {
2338 list = append(list, p.parseCommClause())
2340 rbrace := p.expect(token.RBRACE)
2341 p.expectSemi()
2342 body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
2344 return &ast.SelectStmt{Select: pos, Body: body}
2347 func (p *parser) parseForStmt() ast.Stmt {
2348 if p.trace {
2349 defer un(trace(p, "ForStmt"))
2352 pos := p.expect(token.FOR)
2354 var s1, s2, s3 ast.Stmt
2355 var isRange bool
2356 if p.tok != token.LBRACE {
2357 prevLev := p.exprLev
2358 p.exprLev = -1
2359 if p.tok != token.SEMICOLON {
2360 if p.tok == token.RANGE {
2361 // "for range x" (nil lhs in assignment)
2362 pos := p.pos
2363 p.next()
2364 y := []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
2365 s2 = &ast.AssignStmt{Rhs: y}
2366 isRange = true
2367 } else {
2368 s2, isRange = p.parseSimpleStmt(rangeOk)
2371 if !isRange && p.tok == token.SEMICOLON {
2372 p.next()
2373 s1 = s2
2374 s2 = nil
2375 if p.tok != token.SEMICOLON {
2376 s2, _ = p.parseSimpleStmt(basic)
2378 p.expectSemi()
2379 if p.tok != token.LBRACE {
2380 s3, _ = p.parseSimpleStmt(basic)
2383 p.exprLev = prevLev
2386 body := p.parseBlockStmt()
2387 p.expectSemi()
2389 if isRange {
2390 as := s2.(*ast.AssignStmt)
2391 // check lhs
2392 var key, value ast.Expr
2393 switch len(as.Lhs) {
2394 case 0:
2395 // nothing to do
2396 case 1:
2397 key = as.Lhs[0]
2398 case 2:
2399 key, value = as.Lhs[0], as.Lhs[1]
2400 default:
2401 p.errorExpected(as.Lhs[len(as.Lhs)-1].Pos(), "at most 2 expressions")
2402 return &ast.BadStmt{From: pos, To: p.safePos(body.End())}
2404 // parseSimpleStmt returned a right-hand side that
2405 // is a single unary expression of the form "range x"
2406 x := as.Rhs[0].(*ast.UnaryExpr).X
2407 return &ast.RangeStmt{
2408 For: pos,
2409 Key: key,
2410 Value: value,
2411 TokPos: as.TokPos,
2412 Tok: as.Tok,
2413 X: x,
2414 Body: body,
2418 // regular for statement
2419 return &ast.ForStmt{
2420 For: pos,
2421 Init: s1,
2422 Cond: p.makeExpr(s2, "boolean or range expression"),
2423 Post: s3,
2424 Body: body,
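// NOTE (editorial sketch, not part of the original source): for statement
// shapes handled above, with hypothetical s and ch:
//
//    for i := 0; i < len(s); i++ {}   // Init; Cond; Post
//    for i < len(s) {}                // Cond only
//    for range ch {}                  // RangeStmt with nil Key and Value
//    for i := range s {}              // Key only
//    for i, v := range s {}           // Key and Value
//    for i, v, w := range s {}        // error: at most 2 expressions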
2428 func (p *parser) parseStmt() (s ast.Stmt) {
2429 if p.trace {
2430 defer un(trace(p, "Statement"))
2433 switch p.tok {
2434 case token.CONST, token.TYPE, token.VAR:
2435 s = &ast.DeclStmt{Decl: p.parseDecl(stmtStart)}
2436 case
2437 // tokens that may start an expression
2438 token.IDENT, token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING, token.FUNC, token.LPAREN, // operands
2439 token.LBRACK, token.STRUCT, token.MAP, token.CHAN, token.INTERFACE, // composite types
2440 token.ADD, token.SUB, token.MUL, token.AND, token.XOR, token.ARROW, token.NOT: // unary operators
2441 s, _ = p.parseSimpleStmt(labelOk)
2442 // because of the required look-ahead, labeled statements are
2443 // parsed by parseSimpleStmt - don't expect a semicolon after
2444 // them
2445 if _, isLabeledStmt := s.(*ast.LabeledStmt); !isLabeledStmt {
2446 p.expectSemi()
2448 case token.GO:
2449 s = p.parseGoStmt()
2450 case token.DEFER:
2451 s = p.parseDeferStmt()
2452 case token.RETURN:
2453 s = p.parseReturnStmt()
2454 case token.BREAK, token.CONTINUE, token.GOTO, token.FALLTHROUGH:
2455 s = p.parseBranchStmt(p.tok)
2456 case token.LBRACE:
2457 s = p.parseBlockStmt()
2458 p.expectSemi()
2459 case token.IF:
2460 s = p.parseIfStmt()
2461 case token.SWITCH:
2462 s = p.parseSwitchStmt()
2463 case token.SELECT:
2464 s = p.parseSelectStmt()
2465 case token.FOR:
2466 s = p.parseForStmt()
2467 case token.SEMICOLON:
2468 // Is it ever possible to have an implicit semicolon
2469 // producing an empty statement in a valid program?
2470 // (handle correctly anyway)
2471 s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: p.lit == "\n"}
2472 p.next()
2473 case token.RBRACE:
2474 // a semicolon may be omitted before a closing "}"
2475 s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: true}
2476 default:
2477 // no statement found
2478 pos := p.pos
2479 p.errorExpected(pos, "statement")
2480 p.advance(stmtStart)
2481 s = &ast.BadStmt{From: pos, To: p.pos}
2484 return
2487 // ----------------------------------------------------------------------------
2488 // Declarations
2490 type parseSpecFunction func(doc *ast.CommentGroup, pos token.Pos, keyword token.Token, iota int) ast.Spec
2492 func isValidImport(lit string) bool {
2493 const illegalChars = `!"#$%&'()*,:;<=>?[\]^{|}` + "`\uFFFD"
2494 s, _ := strconv.Unquote(lit) // go/scanner returns a legal string literal
2495 for _, r := range s {
2496 if !unicode.IsGraphic(r) || unicode.IsSpace(r) || strings.ContainsRune(illegalChars, r) {
2497 return false
2500 return s != ""
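// NOTE (editorial sketch, not part of the original source): behavior of
// isValidImport on a few literals:
//
//    isValidImport(`"fmt"`)   // true
//    isValidImport(`"a b"`)   // false: space is not allowed
//    isValidImport(`"a;b"`)   // false: ';' is in illegalChars
//    isValidImport(`""`)      // false: empty path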
2503 func (p *parser) parseImportSpec(doc *ast.CommentGroup, _ token.Pos, _ token.Token, _ int) ast.Spec {
2504 if p.trace {
2505 defer un(trace(p, "ImportSpec"))
2508 var ident *ast.Ident
2509 switch p.tok {
2510 case token.PERIOD:
2511 ident = &ast.Ident{NamePos: p.pos, Name: "."}
2512 p.next()
2513 case token.IDENT:
2514 ident = p.parseIdent()
2517 pos := p.pos
2518 var path string
2519 if p.tok == token.STRING {
2520 path = p.lit
2521 if !isValidImport(path) {
2522 p.error(pos, "invalid import path: "+path)
2524 p.next()
2525 } else {
2526 p.expect(token.STRING) // use expect() error handling
2528 p.expectSemi() // call before accessing p.lineComment
2530 // collect imports
2531 spec := &ast.ImportSpec{
2532 Doc: doc,
2533 Name: ident,
2534 Path: &ast.BasicLit{ValuePos: pos, Kind: token.STRING, Value: path},
2535 Comment: p.lineComment,
2537 p.imports = append(p.imports, spec)
2539 return spec
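// NOTE (editorial sketch, not part of the original source): import specs and
// the resulting ast.ImportSpec.Name:
//
//    import "fmt"     // Name == nil
//    import f "fmt"   // Name.Name == "f"
//    import . "fmt"   // Name.Name == "."
//    import _ "fmt"   // Name.Name == "_" (the blank identifier is an ordinary ident)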
2542 func (p *parser) parseValueSpec(doc *ast.CommentGroup, _ token.Pos, keyword token.Token, iota int) ast.Spec {
2543 if p.trace {
2544 defer un(trace(p, keyword.String()+"Spec"))
2547 pos := p.pos
2548 idents := p.parseIdentList()
2549 typ := p.tryIdentOrType()
2550 var values []ast.Expr
2551 // always permit optional initialization for more tolerant parsing
2552 if p.tok == token.ASSIGN {
2553 p.next()
2554 values = p.parseList(true)
2556 p.expectSemi() // call before accessing p.lineComment
2558 switch keyword {
2559 case token.VAR:
2560 if typ == nil && values == nil {
2561 p.error(pos, "missing variable type or initialization")
2563 case token.CONST:
2564 if values == nil && (iota == 0 || typ != nil) {
2565 p.error(pos, "missing constant value")
2569 spec := &ast.ValueSpec{
2570 Doc: doc,
2571 Names: idents,
2572 Type: typ,
2573 Values: values,
2574 Comment: p.lineComment,
2576 return spec
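// NOTE (editorial sketch, not part of the original source): specs that
// trigger the checks above:
//
//    var x                    // error: missing variable type or initialization
//    var x int                // ok: type without initialization
//    const c                  // error: missing constant value (iota == 0)
//    const ( a = 1; b )       // ok: a later spec may repeat the previous values
//    const ( a = 1; b int )   // error: missing constant value (explicit type, no value)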
2579 func (p *parser) parseGenericType(spec *ast.TypeSpec, openPos token.Pos, name0 *ast.Ident, typ0 ast.Expr) {
2580 if p.trace {
2581 defer un(trace(p, "parseGenericType"))
2584 list := p.parseParameterList(name0, typ0, token.RBRACK)
2585 closePos := p.expect(token.RBRACK)
2586 spec.TypeParams = &ast.FieldList{Opening: openPos, List: list, Closing: closePos}
2587 // Let the type checker decide whether to accept type parameters on aliases:
2588 // see issue #46477.
2589 if p.tok == token.ASSIGN {
2590 // type alias
2591 spec.Assign = p.pos
2592 p.next()
2594 spec.Type = p.parseType()
2597 func (p *parser) parseTypeSpec(doc *ast.CommentGroup, _ token.Pos, _ token.Token, _ int) ast.Spec {
2598 if p.trace {
2599 defer un(trace(p, "TypeSpec"))
2602 ident := p.parseIdent()
2603 spec := &ast.TypeSpec{Doc: doc, Name: ident}
2605 if p.tok == token.LBRACK && p.allowGenerics() {
2606 lbrack := p.pos
2607 p.next()
2608 if p.tok == token.IDENT {
2609 // We may have an array type or a type parameter list.
2610 // In either case we expect an expression x (which may
2611 // just be a name, or a more complex expression) which
2612 // we can analyze further.
2614 // A type parameter list may have a type bound starting
2615 // with a "[" as in: P []E. In that case, simply parsing
2616 // an expression would lead to an error: P[] is invalid.
2617 // But since index or slice expressions are never constant
2618 // and thus invalid array length expressions, if we see a
2619 // "[" following a name it must be the start of an array
2620 // or slice constraint. Only if we don't see a "[" do we
2621 // need to parse a full expression.
2623 // Index or slice expressions are never constant and thus invalid
2624 // array length expressions. Thus, if we see a "[" following name
2625 // we can safely assume that "[" name starts a type parameter list.
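//
// NOTE (editorial sketch, not part of the original source; N and E are
// hypothetical names): the cases being distinguished here are
//
//    type A [N]int       // '[' name ']' ...  -> array type
//    type T[P any] int   // '[' name name ... -> type parameter list
//    type S[P []E] int   // '[' name '[' ...  -> bound starts with '[',
//                        //                      so no full expression parse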
2626 var x ast.Expr = p.parseIdent()
2627 if p.tok != token.LBRACK {
2628 // To parse the expression starting with name, expand
2629 // the call sequence we would get by passing in name
2630 // to parser.expr, and pass in name to parsePrimaryExpr.
2631 p.exprLev++
2632 lhs := p.parsePrimaryExpr(x)
2633 x = p.parseBinaryExpr(lhs, token.LowestPrec+1, false)
2634 p.exprLev--
2637 // analyze the cases
2638 var pname *ast.Ident // pname != nil means pname is the type parameter name
2639 var ptype ast.Expr // ptype != nil means ptype is the type parameter type; pname != nil in this case
2641 switch t := x.(type) {
2642 case *ast.Ident:
2643 // Unless we see a "]", we are at the start of a type parameter list.
2644 if p.tok != token.RBRACK {
2645 // d.Name "[" name ...
2646 pname = t
2647 // no ptype
2649 case *ast.BinaryExpr:
2650 // If we have an expression of the form name*T, and T is a (possibly
2651 // parenthesized) type literal or the next token is a comma, we are
2652 // at the start of a type parameter list.
2653 if name, _ := t.X.(*ast.Ident); name != nil {
2654 if t.Op == token.MUL && (isTypeLit(t.Y) || p.tok == token.COMMA) {
2655 // d.Name "[" name "*" t.Y
2656 // d.Name "[" name "*" t.Y ","
2657 // convert t into unary *t.Y
2658 pname = name
2659 ptype = &ast.StarExpr{Star: t.OpPos, X: t.Y}
2662 if pname == nil {
2663 // A normal binary expression. Since we passed check=false, we must
2664 // now check its operands.
2665 p.checkBinaryExpr(t)
2667 case *ast.CallExpr:
2668 // If we have an expression of the form name(T), and T is a (possibly
2669 // parenthesized) type literal or the next token is a comma, we are
2670 // at the start of a type parameter list.
2671 if name, _ := t.Fun.(*ast.Ident); name != nil {
2672 if len(t.Args) == 1 && !t.Ellipsis.IsValid() && (isTypeLit(t.Args[0]) || p.tok == token.COMMA) {
2673 // d.Name "[" name "(" t.ArgList[0] ")"
2674 // d.Name "[" name "(" t.ArgList[0] ")" ","
2675 pname = name
2676 ptype = t.Args[0]
2681 if pname != nil {
2682 // d.Name "[" pname ...
2683 // d.Name "[" pname ptype ...
2684 // d.Name "[" pname ptype "," ...
2685 p.parseGenericType(spec, lbrack, pname, ptype)
2686 } else {
2687 // d.Name "[" x ...
2688 spec.Type = p.parseArrayType(lbrack, x)
2690 } else {
2691 // array type
2692 spec.Type = p.parseArrayType(lbrack, nil)
2694 } else {
2695 // no type parameters
2696 if p.tok == token.ASSIGN {
2697 // type alias
2698 spec.Assign = p.pos
2699 p.next()
2701 spec.Type = p.parseType()
2704 p.expectSemi() // call before accessing p.lineComment
2705 spec.Comment = p.lineComment
2707 return spec
2710 // isTypeLit reports whether x is a (possibly parenthesized) type literal.
2711 func isTypeLit(x ast.Expr) bool {
2712 switch x := x.(type) {
2713 case *ast.ArrayType, *ast.StructType, *ast.FuncType, *ast.InterfaceType, *ast.MapType, *ast.ChanType:
2714 return true
2715 case *ast.StarExpr:
2716 // *T may be a pointer dereference.
2717 // Only consider *T as type literal if T is a type literal.
2718 return isTypeLit(x.X)
2719 case *ast.ParenExpr:
2720 return isTypeLit(x.X)
2722 return false
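// NOTE (editorial sketch, not part of the original source): examples for
// isTypeLit, with a hypothetical value p and type T:
//
//    []int      -> true  (slice type literal)
//    *[]int     -> true  (pointer to a type literal)
//    (chan T)   -> true  (parenthesized type literal)
//    *p         -> false (may be a dereference of the value p)
//    T          -> false (a plain name is not a type literal)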
2725 func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
2726 if p.trace {
2727 defer un(trace(p, "GenDecl("+keyword.String()+")"))
2730 doc := p.leadComment
2731 pos := p.expect(keyword)
2732 var lparen, rparen token.Pos
2733 var list []ast.Spec
2734 if p.tok == token.LPAREN {
2735 lparen = p.pos
2736 p.next()
2737 for iota := 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++ {
2738 list = append(list, f(p.leadComment, pos, keyword, iota))
2740 rparen = p.expect(token.RPAREN)
2741 p.expectSemi()
2742 } else {
2743 list = append(list, f(nil, pos, keyword, 0))
2746 return &ast.GenDecl{
2747 Doc: doc,
2748 TokPos: pos,
2749 Tok: keyword,
2750 Lparen: lparen,
2751 Specs: list,
2752 Rparen: rparen,
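// NOTE (editorial sketch, not part of the original source): in a parenthesized
// group the spec function f receives an increasing iota, which parseValueSpec
// uses for its "missing constant value" check:
//
//    const (
//        a = iota   // f(..., iota == 0)
//        b          // f(..., iota == 1)
//    )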
2756 func (p *parser) parseFuncDecl() *ast.FuncDecl {
2757 if p.trace {
2758 defer un(trace(p, "FunctionDecl"))
2761 doc := p.leadComment
2762 pos := p.expect(token.FUNC)
2764 var recv *ast.FieldList
2765 if p.tok == token.LPAREN {
2766 _, recv = p.parseParameters(false)
2769 ident := p.parseIdent()
2771 tparams, params := p.parseParameters(true)
2772 if recv != nil && tparams != nil {
2773 // Method declarations do not have type parameters. We parse them for a
2774 // better error message and improved error recovery.
2775 p.error(tparams.Opening, "method must have no type parameters")
2776 tparams = nil
2778 results := p.parseResult()
2780 var body *ast.BlockStmt
2781 switch p.tok {
2782 case token.LBRACE:
2783 body = p.parseBody()
2784 p.expectSemi()
2785 case token.SEMICOLON:
2786 p.next()
2787 if p.tok == token.LBRACE {
2788 // opening { of function declaration on next line
2789 p.error(p.pos, "unexpected semicolon or newline before {")
2790 body = p.parseBody()
2791 p.expectSemi()
2793 default:
2794 p.expectSemi()
2797 decl := &ast.FuncDecl{
2798 Doc: doc,
2799 Recv: recv,
2800 Name: ident,
2801 Type: &ast.FuncType{
2802 Func: pos,
2803 TypeParams: tparams,
2804 Params: params,
2805 Results: results,
2807 Body: body,
2809 return decl
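// NOTE (editorial sketch, not part of the original source): declarations
// handled above, with hypothetical T, P, f, and m:
//
//    func f(x int) int { return x }   // Recv == nil, TypeParams == nil
//    func f[P any](x P) {}            // generic function: TypeParams set
//    func (t T) m() {}                // method: Recv set
//    func (t T) m[P any]() {}         // parsed, but "method must have no type parameters"
//    func f(x int) int                // no body (e.g. an assembly-backed declaration)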
2812 func (p *parser) parseDecl(sync map[token.Token]bool) ast.Decl {
2813 if p.trace {
2814 defer un(trace(p, "Declaration"))
2817 var f parseSpecFunction
2818 switch p.tok {
2819 case token.CONST, token.VAR:
2820 f = p.parseValueSpec
2822 case token.TYPE:
2823 f = p.parseTypeSpec
2825 case token.FUNC:
2826 return p.parseFuncDecl()
2828 default:
2829 pos := p.pos
2830 p.errorExpected(pos, "declaration")
2831 p.advance(sync)
2832 return &ast.BadDecl{From: pos, To: p.pos}
2835 return p.parseGenDecl(p.tok, f)
2838 // ----------------------------------------------------------------------------
2839 // Source files
2841 func (p *parser) parseFile() *ast.File {
2842 if p.trace {
2843 defer un(trace(p, "File"))
2846 // Don't bother parsing the rest if we had errors scanning the first token.
2847 // Likely not a Go source file at all.
2848 if p.errors.Len() != 0 {
2849 return nil
2852 // package clause
2853 doc := p.leadComment
2854 pos := p.expect(token.PACKAGE)
2855 // Go spec: The package clause is not a declaration;
2856 // the package name does not appear in any scope.
2857 ident := p.parseIdent()
2858 if ident.Name == "_" && p.mode&DeclarationErrors != 0 {
2859 p.error(p.pos, "invalid package name _")
2861 p.expectSemi()
2863 // Don't bother parsing the rest if we had errors parsing the package clause.
2864 // Likely not a Go source file at all.
2865 if p.errors.Len() != 0 {
2866 return nil
2869 var decls []ast.Decl
2870 if p.mode&PackageClauseOnly == 0 {
2871 // import decls
2872 for p.tok == token.IMPORT {
2873 decls = append(decls, p.parseGenDecl(token.IMPORT, p.parseImportSpec))
2876 if p.mode&ImportsOnly == 0 {
2877 // rest of package body
2878 for p.tok != token.EOF {
2879 decls = append(decls, p.parseDecl(declStart))
2884 f := &ast.File{
2885 Doc: doc,
2886 Package: pos,
2887 Name: ident,
2888 Decls: decls,
2889 Imports: p.imports,
2890 Comments: p.comments,
2892 var declErr func(token.Pos, string)
2893 if p.mode&DeclarationErrors != 0 {
2894 declErr = p.error
2896 if p.mode&SkipObjectResolution == 0 {
2897 resolveFile(f, p.file, declErr)
2900 return f
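// NOTE (editorial sketch, not part of the original source): a minimal use of
// this package through its exported entry point, assuming a file sample.go
// exists on disk:
//
//    fset := token.NewFileSet()
//    f, err := parser.ParseFile(fset, "sample.go", nil, parser.ImportsOnly)
//    if err != nil {
//        log.Fatal(err)
//    }
//    for _, imp := range f.Imports {
//        fmt.Println(imp.Path.Value)
//    }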