Source file src/go/parser/parser.go

Documentation: go/parser

     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  // Package parser implements a parser for Go source files. Input may be
     6  // provided in a variety of forms (see the various Parse* functions); the
     7  // output is an abstract syntax tree (AST) representing the Go source. The
     8  // parser is invoked through one of the Parse* functions.
     9  //
    10  // The parser accepts a larger language than is syntactically permitted by
    11  // the Go spec, for simplicity, and for improved robustness in the presence
    12  // of syntax errors. For instance, in method declarations, the receiver is
    13  // treated like an ordinary parameter list and thus may contain multiple
    14  // entries where the spec permits exactly one. Consequently, the corresponding
    15  // field in the AST (ast.FuncDecl.Recv) is not restricted to one entry.
    16  //
    17  package parser
    18  
    19  import (
    20  	"fmt"
    21  	"go/ast"
    22  	"go/scanner"
    23  	"go/token"
    24  	"strconv"
    25  	"strings"
    26  	"unicode"
    27  )
    28  
    29  // The parser structure holds the parser's internal state.
    30  type parser struct {
    31  	file    *token.File
    32  	errors  scanner.ErrorList
    33  	scanner scanner.Scanner
    34  
    35  	// Tracing/debugging
    36  	mode   Mode // parsing mode
    37  	trace  bool // == (mode & Trace != 0)
    38  	indent int  // indentation used for tracing output
    39  
    40  	// Comments
    41  	comments    []*ast.CommentGroup
    42  	leadComment *ast.CommentGroup // last lead comment
    43  	lineComment *ast.CommentGroup // last line comment
    44  
    45  	// Next token
    46  	pos token.Pos   // token position
    47  	tok token.Token // one token look-ahead
    48  	lit string      // token literal
    49  
    50  	// Error recovery
    51  	// (used to limit the number of calls to parser.advance
    52  	// w/o making scanning progress - avoids potential endless
    53  	// loops across multiple parser functions during error recovery)
    54  	syncPos token.Pos // last synchronization position
    55  	syncCnt int       // number of parser.advance calls without progress
    56  
    57  	// Non-syntactic parser control
    58  	exprLev int  // < 0: in control clause, >= 0: in expression
    59  	inRhs   bool // if set, the parser is parsing a rhs expression
    60  
    61  	// Ordinary identifier scopes
    62  	pkgScope   *ast.Scope        // pkgScope.Outer == nil
    63  	topScope   *ast.Scope        // top-most scope; may be pkgScope
    64  	unresolved []*ast.Ident      // unresolved identifiers
    65  	imports    []*ast.ImportSpec // list of imports
    66  
    67  	// Label scopes
    68  	// (maintained by open/close LabelScope)
    69  	labelScope  *ast.Scope     // label scope for current function
    70  	targetStack [][]*ast.Ident // stack of unresolved labels
    71  }
    72  
// init prepares the parser for parsing the given source: it registers
// the file with fset, configures the scanner (comments are scanned only
// if requested via mode), and primes the one-token look-ahead by
// fetching the first token.
func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode Mode) {
	p.file = fset.AddFile(filename, -1, len(src))
	var m scanner.Mode
	if mode&ParseComments != 0 {
		m = scanner.ScanComments
	}
	// scanner errors are accumulated on the parser's error list
	eh := func(pos token.Position, msg string) { p.errors.Add(pos, msg) }
	p.scanner.Init(p.file, src, eh, m)

	p.mode = mode
	p.trace = mode&Trace != 0 // for convenience (p.trace is used frequently)

	p.next()
}
    87  
    88  // ----------------------------------------------------------------------------
    89  // Scoping support
    90  
// openScope pushes a new ordinary identifier scope on top of p.topScope.
func (p *parser) openScope() {
	p.topScope = ast.NewScope(p.topScope)
}
    94  
// closeScope pops the current ordinary identifier scope.
func (p *parser) closeScope() {
	p.topScope = p.topScope.Outer
}
    98  
// openLabelScope pushes a new label scope together with an empty slot
// on the stack of (not yet resolved) label references.
func (p *parser) openLabelScope() {
	p.labelScope = ast.NewScope(p.labelScope)
	p.targetStack = append(p.targetStack, nil)
}
   103  
// closeLabelScope resolves the label references collected for the
// current label scope and then pops the scope. Labels that cannot be
// resolved are reported if the DeclarationErrors mode is set.
func (p *parser) closeLabelScope() {
	// resolve labels
	n := len(p.targetStack) - 1
	scope := p.labelScope
	for _, ident := range p.targetStack[n] {
		ident.Obj = scope.Lookup(ident.Name)
		if ident.Obj == nil && p.mode&DeclarationErrors != 0 {
			p.error(ident.Pos(), fmt.Sprintf("label %s undefined", ident.Name))
		}
	}
	// pop label scope
	p.targetStack = p.targetStack[0:n]
	p.labelScope = p.labelScope.Outer
}
   118  
// declare declares each of the given idents in the given scope with the
// given kind, recording decl and data on the newly created objects. The
// blank identifier is never inserted into the scope; a redeclaration of
// any other identifier is reported if the DeclarationErrors mode is set.
func (p *parser) declare(decl, data interface{}, scope *ast.Scope, kind ast.ObjKind, idents ...*ast.Ident) {
	for _, ident := range idents {
		assert(ident.Obj == nil, "identifier already declared or resolved")
		obj := ast.NewObj(kind, ident.Name)
		// remember the corresponding declaration for redeclaration
		// errors and global variable resolution/typechecking phase
		obj.Decl = decl
		obj.Data = data
		ident.Obj = obj
		if ident.Name != "_" {
			if alt := scope.Insert(obj); alt != nil && p.mode&DeclarationErrors != 0 {
				prevDecl := ""
				if pos := alt.Pos(); pos.IsValid() {
					prevDecl = fmt.Sprintf("\n\tprevious declaration at %s", p.file.Position(pos))
				}
				p.error(ident.Pos(), fmt.Sprintf("%s redeclared in this block%s", ident.Name, prevDecl))
			}
		}
	}
}
   139  
// shortVarDecl declares the variables on the left-hand side of a short
// variable declaration (:=) in the current scope, associating each new
// object with decl. It enforces the spec rule that at least one
// non-blank variable on the left side must be new.
func (p *parser) shortVarDecl(decl *ast.AssignStmt, list []ast.Expr) {
	// Go spec: A short variable declaration may redeclare variables
	// provided they were originally declared in the same block with
	// the same type, and at least one of the non-blank variables is new.
	n := 0 // number of new variables
	for _, x := range list {
		if ident, isIdent := x.(*ast.Ident); isIdent {
			assert(ident.Obj == nil, "identifier already declared or resolved")
			obj := ast.NewObj(ast.Var, ident.Name)
			// remember corresponding assignment for other tools
			obj.Decl = decl
			ident.Obj = obj
			if ident.Name != "_" {
				if alt := p.topScope.Insert(obj); alt != nil {
					ident.Obj = alt // redeclaration
				} else {
					n++ // new declaration
				}
			}
		} else {
			p.errorExpected(x.Pos(), "identifier on left side of :=")
		}
	}
	if n == 0 && p.mode&DeclarationErrors != 0 {
		p.error(list[0].Pos(), "no new variables on left side of :=")
	}
}
   167  
// The unresolved object is a sentinel to mark identifiers that have been added
// to the list of unresolved identifiers (see tryResolve). The sentinel is only
// used for verifying internal consistency.
var unresolved = new(ast.Object)
   172  
// If x is an identifier, tryResolve attempts to resolve x by looking up
// the object it denotes. If no object is found and collectUnresolved is
// set, x is marked as unresolved (with the unresolved sentinel) and
// collected in the list of unresolved identifiers.
func (p *parser) tryResolve(x ast.Expr, collectUnresolved bool) {
	// nothing to do if x is not an identifier or the blank identifier
	ident, _ := x.(*ast.Ident)
	if ident == nil {
		return
	}
	assert(ident.Obj == nil, "identifier already declared or resolved")
	if ident.Name == "_" {
		return
	}
	// try to resolve the identifier by walking the scope chain outward
	for s := p.topScope; s != nil; s = s.Outer {
		if obj := s.Lookup(ident.Name); obj != nil {
			ident.Obj = obj
			return
		}
	}
	// all local scopes are known, so any unresolved identifier
	// must be found either in the file scope, package scope
	// (perhaps in another file), or universe scope --- collect
	// them so that they can be resolved later
	if collectUnresolved {
		ident.Obj = unresolved
		p.unresolved = append(p.unresolved, ident)
	}
}
   204  
// resolve resolves x if it is an identifier; identifiers that cannot be
// resolved now are collected in p.unresolved for a later phase.
func (p *parser) resolve(x ast.Expr) {
	p.tryResolve(x, true)
}
   208  
   209  // ----------------------------------------------------------------------------
   210  // Parsing support
   211  
   212  func (p *parser) printTrace(a ...interface{}) {
   213  	const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
   214  	const n = len(dots)
   215  	pos := p.file.Position(p.pos)
   216  	fmt.Printf("%5d:%3d: ", pos.Line, pos.Column)
   217  	i := 2 * p.indent
   218  	for i > n {
   219  		fmt.Print(dots)
   220  		i -= n
   221  	}
   222  	// i <= n
   223  	fmt.Print(dots[0:i])
   224  	fmt.Println(a...)
   225  }
   226  
// trace prints an opening trace message and increases the trace
// indentation. It returns p so it composes with un in a single
// deferred call: defer un(trace(p, "...")).
func trace(p *parser, msg string) *parser {
	p.printTrace(msg, "(")
	p.indent++
	return p
}
   232  
// un is the counterpart of trace: it decreases the trace indentation
// and prints the closing parenthesis.
// Usage pattern: defer un(trace(p, "..."))
func un(p *parser) {
	p.indent--
	p.printTrace(")")
}
   238  
// next0 advances to the next token (comments included).
//
// Because of one-token look-ahead, print the previous token
// when tracing as it provides a more readable output. The
// very first token (!p.pos.IsValid()) is not initialized
// (it is token.ILLEGAL), so don't print it.
func (p *parser) next0() {
	if p.trace && p.pos.IsValid() {
		s := p.tok.String()
		switch {
		case p.tok.IsLiteral():
			p.printTrace(s, p.lit)
		case p.tok.IsOperator(), p.tok.IsKeyword():
			p.printTrace("\"" + s + "\"")
		default:
			p.printTrace(s)
		}
	}

	p.pos, p.tok, p.lit = p.scanner.Scan()
}
   259  
// Consume a comment and return it and the line on which it ends.
func (p *parser) consumeComment() (comment *ast.Comment, endline int) {
	// /*-style comments may end on a different line than where they start.
	// Scan the comment for '\n' chars and adjust endline accordingly.
	// (p.lit[0] is '/'; p.lit[1] == '*' identifies a /*-style comment.)
	endline = p.file.Line(p.pos)
	if p.lit[1] == '*' {
		// don't use range here - no need to decode Unicode code points
		for i := 0; i < len(p.lit); i++ {
			if p.lit[i] == '\n' {
				endline++
			}
		}
	}

	comment = &ast.Comment{Slash: p.pos, Text: p.lit}
	p.next0()

	return
}
   279  
// Consume a group of adjacent comments, add it to the parser's
// comments list, and return it together with the line at which
// the last comment in the group ends. A non-comment token or n
// empty lines terminate a comment group.
func (p *parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline int) {
	var list []*ast.Comment
	endline = p.file.Line(p.pos)
	// a comment belongs to the group if it starts no more than
	// n lines after the previous comment ended
	for p.tok == token.COMMENT && p.file.Line(p.pos) <= endline+n {
		var comment *ast.Comment
		comment, endline = p.consumeComment()
		list = append(list, comment)
	}

	// add comment group to the comments list
	comments = &ast.CommentGroup{List: list}
	p.comments = append(p.comments, comments)

	return
}
   300  
// Advance to the next non-comment token. In the process, collect
// any comment groups encountered, and remember the last lead and
// line comments.
//
// A lead comment is a comment group that starts and ends in a
// line without any other tokens and that is followed by a non-comment
// token on the line immediately after the comment group.
//
// A line comment is a comment group that follows a non-comment
// token on the same line, and that has no tokens after it on the line
// where it ends.
//
// Lead and line comments may be considered documentation that is
// stored in the AST.
func (p *parser) next() {
	p.leadComment = nil
	p.lineComment = nil
	prev := p.pos // position of the token we are moving past
	p.next0()

	if p.tok == token.COMMENT {
		var comment *ast.CommentGroup
		var endline int

		if p.file.Line(p.pos) == p.file.Line(prev) {
			// The comment is on same line as the previous token; it
			// cannot be a lead comment but may be a line comment.
			comment, endline = p.consumeCommentGroup(0)
			if p.file.Line(p.pos) != endline || p.tok == token.EOF {
				// The next token is on a different line, thus
				// the last comment group is a line comment.
				p.lineComment = comment
			}
		}

		// consume successor comments, if any
		endline = -1
		for p.tok == token.COMMENT {
			comment, endline = p.consumeCommentGroup(1)
		}

		if endline+1 == p.file.Line(p.pos) {
			// The next token is following on the line immediately after the
			// comment group, thus the last comment group is a lead comment.
			p.leadComment = comment
		}
	}
}
   350  
// A bailout panic is raised to indicate early termination
// (see p.error: raised when more than 10 errors have accumulated
// and the AllErrors mode is not set).
type bailout struct{}
   353  
// error records an error with the given message at the given position.
func (p *parser) error(pos token.Pos, msg string) {
	epos := p.file.Position(pos)

	// If AllErrors is not set, discard errors reported on the same line
	// as the last recorded error and stop parsing if there are more than
	// 10 errors.
	if p.mode&AllErrors == 0 {
		n := len(p.errors)
		if n > 0 && p.errors[n-1].Pos.Line == epos.Line {
			return // discard - likely a spurious error
		}
		if n > 10 {
			panic(bailout{}) // recovered at a higher level to abort parsing
		}
	}

	p.errors.Add(epos, msg)
}
   372  
   373  func (p *parser) errorExpected(pos token.Pos, msg string) {
   374  	msg = "expected " + msg
   375  	if pos == p.pos {
   376  		// the error happened at the current position;
   377  		// make the error message more specific
   378  		switch {
   379  		case p.tok == token.SEMICOLON && p.lit == "\n":
   380  			msg += ", found newline"
   381  		case p.tok.IsLiteral():
   382  			// print 123 rather than 'INT', etc.
   383  			msg += ", found " + p.lit
   384  		default:
   385  			msg += ", found '" + p.tok.String() + "'"
   386  		}
   387  	}
   388  	p.error(pos, msg)
   389  }
   390  
// expect consumes the current token and returns its position. If the
// token is not the expected tok, an error is reported first; the
// parser advances in either case to guarantee progress.
func (p *parser) expect(tok token.Token) token.Pos {
	pos := p.pos
	if p.tok != tok {
		p.errorExpected(pos, "'"+tok.String()+"'")
	}
	p.next() // make progress
	return pos
}
   399  
   400  // expect2 is like expect, but it returns an invalid position
   401  // if the expected token is not found.
   402  func (p *parser) expect2(tok token.Token) (pos token.Pos) {
   403  	if p.tok == tok {
   404  		pos = p.pos
   405  	} else {
   406  		p.errorExpected(p.pos, "'"+tok.String()+"'")
   407  	}
   408  	p.next() // make progress
   409  	return
   410  }
   411  
   412  // expectClosing is like expect but provides a better error message
   413  // for the common case of a missing comma before a newline.
   414  //
   415  func (p *parser) expectClosing(tok token.Token, context string) token.Pos {
   416  	if p.tok != tok && p.tok == token.SEMICOLON && p.lit == "\n" {
   417  		p.error(p.pos, "missing ',' before newline in "+context)
   418  		p.next()
   419  	}
   420  	return p.expect(tok)
   421  }
   422  
// expectSemi consumes a terminating semicolon. A ',' is accepted in its
// place (with an error); any other token triggers an error followed by
// a skip to the next statement start for recovery.
func (p *parser) expectSemi() {
	// semicolon is optional before a closing ')' or '}'
	if p.tok != token.RPAREN && p.tok != token.RBRACE {
		switch p.tok {
		case token.COMMA:
			// permit a ',' instead of a ';' but complain
			p.errorExpected(p.pos, "';'")
			fallthrough
		case token.SEMICOLON:
			p.next()
		default:
			p.errorExpected(p.pos, "';'")
			p.advance(stmtStart)
		}
	}
}
   439  
   440  func (p *parser) atComma(context string, follow token.Token) bool {
   441  	if p.tok == token.COMMA {
   442  		return true
   443  	}
   444  	if p.tok != follow {
   445  		msg := "missing ','"
   446  		if p.tok == token.SEMICOLON && p.lit == "\n" {
   447  			msg += " before newline"
   448  		}
   449  		p.error(p.pos, msg+" in "+context)
   450  		return true // "insert" comma and continue
   451  	}
   452  	return false
   453  }
   454  
   455  func assert(cond bool, msg string) {
   456  	if !cond {
   457  		panic("go/parser internal error: " + msg)
   458  	}
   459  }
   460  
// advance consumes tokens until the current token p.tok
// is in the 'to' set, or token.EOF. For error recovery.
// p.syncPos/p.syncCnt limit how often advance may return without
// consuming anything, preventing endless recovery loops.
func (p *parser) advance(to map[token.Token]bool) {
	for ; p.tok != token.EOF; p.next() {
		if to[p.tok] {
			// Return only if parser made some progress since last
			// sync or if it has not reached 10 advance calls without
			// progress. Otherwise consume at least one token to
			// avoid an endless parser loop (it is possible that
			// both parseOperand and parseStmt call advance and
			// correctly do not advance, thus the need for the
			// invocation limit p.syncCnt).
			if p.pos == p.syncPos && p.syncCnt < 10 {
				p.syncCnt++
				return
			}
			if p.pos > p.syncPos {
				p.syncPos = p.pos
				p.syncCnt = 0
				return
			}
			// Reaching here indicates a parser bug, likely an
			// incorrect token list in this function, but it only
			// leads to skipping of possibly correct code if a
			// previous error is present, and thus is preferred
			// over a non-terminating parse.
		}
	}
}
   490  
// stmtStart is the set of tokens that can start a statement;
// used as a synchronization set for error recovery (see advance).
var stmtStart = map[token.Token]bool{
	token.BREAK:       true,
	token.CONST:       true,
	token.CONTINUE:    true,
	token.DEFER:       true,
	token.FALLTHROUGH: true,
	token.FOR:         true,
	token.GO:          true,
	token.GOTO:        true,
	token.IF:          true,
	token.RETURN:      true,
	token.SELECT:      true,
	token.SWITCH:      true,
	token.TYPE:        true,
	token.VAR:         true,
}
   507  
// declStart is the set of tokens that can start a declaration;
// used as a synchronization set for error recovery (see advance).
var declStart = map[token.Token]bool{
	token.CONST: true,
	token.TYPE:  true,
	token.VAR:   true,
}
   513  
// exprEnd is the set of tokens that can follow an expression;
// used as a synchronization set for error recovery (see advance).
var exprEnd = map[token.Token]bool{
	token.COMMA:     true,
	token.COLON:     true,
	token.SEMICOLON: true,
	token.RPAREN:    true,
	token.RBRACK:    true,
	token.RBRACE:    true,
}
   522  
// safePos returns a valid file position for a given position: If pos
// is valid to begin with, safePos returns pos. If pos is out-of-range,
// safePos returns the EOF position.
//
// This is a hack to work around "artificial" end positions in the AST which
// are computed by adding 1 to (presumably valid) token positions. If the
// token positions are invalid due to parse errors, the resulting end position
// may be past the file's EOF position, which would lead to panics if used
// later on.
func (p *parser) safePos(pos token.Pos) (res token.Pos) {
	defer func() {
		if recover() != nil {
			res = token.Pos(p.file.Base() + p.file.Size()) // EOF position
		}
	}()
	_ = p.file.Offset(pos) // trigger a panic if position is out-of-range
	return pos
}
   542  
   543  // ----------------------------------------------------------------------------
   544  // Identifiers
   545  
   546  func (p *parser) parseIdent() *ast.Ident {
   547  	pos := p.pos
   548  	name := "_"
   549  	if p.tok == token.IDENT {
   550  		name = p.lit
   551  		p.next()
   552  	} else {
   553  		p.expect(token.IDENT) // use expect() error handling
   554  	}
   555  	return &ast.Ident{NamePos: pos, Name: name}
   556  }
   557  
   558  func (p *parser) parseIdentList() (list []*ast.Ident) {
   559  	if p.trace {
   560  		defer un(trace(p, "IdentList"))
   561  	}
   562  
   563  	list = append(list, p.parseIdent())
   564  	for p.tok == token.COMMA {
   565  		p.next()
   566  		list = append(list, p.parseIdent())
   567  	}
   568  
   569  	return
   570  }
   571  
   572  // ----------------------------------------------------------------------------
   573  // Common productions
   574  
   575  // If lhs is set, result list elements which are identifiers are not resolved.
   576  func (p *parser) parseExprList(lhs bool) (list []ast.Expr) {
   577  	if p.trace {
   578  		defer un(trace(p, "ExpressionList"))
   579  	}
   580  
   581  	list = append(list, p.checkExpr(p.parseExpr(lhs)))
   582  	for p.tok == token.COMMA {
   583  		p.next()
   584  		list = append(list, p.checkExpr(p.parseExpr(lhs)))
   585  	}
   586  
   587  	return
   588  }
   589  
// parseLhsList parses an expression list in lhs context. Whether the
// identifiers are resolved here depends on the token that follows the
// list (see the case comments below); for ':=' and ':' resolution or
// declaration is deferred to the caller.
func (p *parser) parseLhsList() []ast.Expr {
	old := p.inRhs
	p.inRhs = false
	list := p.parseExprList(true)
	switch p.tok {
	case token.DEFINE:
		// lhs of a short variable declaration
		// but doesn't enter scope until later:
		// caller must call p.shortVarDecl(p.makeIdentList(list))
		// at appropriate time.
	case token.COLON:
		// lhs of a label declaration or a communication clause of a select
		// statement (parseLhsList is not called when parsing the case clause
		// of a switch statement):
		// - labels are declared by the caller of parseLhsList
		// - for communication clauses, if there is a stand-alone identifier
		//   followed by a colon, we have a syntax error; there is no need
		//   to resolve the identifier in that case
	default:
		// identifiers must be declared elsewhere
		for _, x := range list {
			p.resolve(x)
		}
	}
	p.inRhs = old
	return list
}
   617  
   618  func (p *parser) parseRhsList() []ast.Expr {
   619  	old := p.inRhs
   620  	p.inRhs = true
   621  	list := p.parseExprList(false)
   622  	p.inRhs = old
   623  	return list
   624  }
   625  
   626  // ----------------------------------------------------------------------------
   627  // Types
   628  
   629  func (p *parser) parseType() ast.Expr {
   630  	if p.trace {
   631  		defer un(trace(p, "Type"))
   632  	}
   633  
   634  	typ := p.tryType()
   635  
   636  	if typ == nil {
   637  		pos := p.pos
   638  		p.errorExpected(pos, "type")
   639  		p.advance(exprEnd)
   640  		return &ast.BadExpr{From: pos, To: p.pos}
   641  	}
   642  
   643  	return typ
   644  }
   645  
// parseTypeName parses a type name: a plain identifier or a qualified
// identifier (pkg.Ident). If the result is an identifier, it is not
// resolved.
func (p *parser) parseTypeName() ast.Expr {
	if p.trace {
		defer un(trace(p, "TypeName"))
	}

	ident := p.parseIdent()
	// don't resolve ident yet - it may be a parameter or field name

	if p.tok == token.PERIOD {
		// ident is a package name
		p.next()
		p.resolve(ident)
		sel := p.parseIdent()
		return &ast.SelectorExpr{X: ident, Sel: sel}
	}

	return ident
}
   665  
// parseArrayType parses an array or slice type:
// "[" [ length | "..." ] "]" ElementType.
func (p *parser) parseArrayType() ast.Expr {
	if p.trace {
		defer un(trace(p, "ArrayType"))
	}

	lbrack := p.expect(token.LBRACK)
	p.exprLev++ // the length is an ordinary expression, not a control clause
	var len ast.Expr
	// always permit ellipsis for more fault-tolerant parsing
	if p.tok == token.ELLIPSIS {
		len = &ast.Ellipsis{Ellipsis: p.pos}
		p.next()
	} else if p.tok != token.RBRACK {
		len = p.parseRhs()
	}
	p.exprLev--
	p.expect(token.RBRACK)
	elt := p.parseType()

	return &ast.ArrayType{Lbrack: lbrack, Len: len, Elt: elt}
}
   687  
   688  func (p *parser) makeIdentList(list []ast.Expr) []*ast.Ident {
   689  	idents := make([]*ast.Ident, len(list))
   690  	for i, x := range list {
   691  		ident, isIdent := x.(*ast.Ident)
   692  		if !isIdent {
   693  			if _, isBad := x.(*ast.BadExpr); !isBad {
   694  				// only report error if it's a new one
   695  				p.errorExpected(x.Pos(), "identifier")
   696  			}
   697  			ident = &ast.Ident{NamePos: x.Pos(), Name: "_"}
   698  		}
   699  		idents[i] = ident
   700  	}
   701  	return idents
   702  }
   703  
// parseFieldDecl parses a single struct field declaration — either
// "IdentifierList Type" or an anonymous (embedded) field — with an
// optional trailing string tag. Declared field names are entered into
// the given struct scope.
func (p *parser) parseFieldDecl(scope *ast.Scope) *ast.Field {
	if p.trace {
		defer un(trace(p, "FieldDecl"))
	}

	doc := p.leadComment

	// 1st FieldDecl
	// A type name used as an anonymous field looks like a field identifier.
	var list []ast.Expr
	for {
		list = append(list, p.parseVarType(false))
		if p.tok != token.COMMA {
			break
		}
		p.next()
	}

	typ := p.tryVarType(false)

	// analyze case
	var idents []*ast.Ident
	if typ != nil {
		// IdentifierList Type
		idents = p.makeIdentList(list)
	} else {
		// ["*"] TypeName (AnonymousField)
		typ = list[0] // we always have at least one element
		if n := len(list); n > 1 {
			p.errorExpected(p.pos, "type")
			typ = &ast.BadExpr{From: p.pos, To: p.pos}
		} else if !isTypeName(deref(typ)) {
			p.errorExpected(typ.Pos(), "anonymous field")
			typ = &ast.BadExpr{From: typ.Pos(), To: p.safePos(typ.End())}
		}
	}

	// Tag
	var tag *ast.BasicLit
	if p.tok == token.STRING {
		tag = &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
		p.next()
	}

	p.expectSemi() // call before accessing p.linecomment

	field := &ast.Field{Doc: doc, Names: idents, Type: typ, Tag: tag, Comment: p.lineComment}
	p.declare(field, nil, scope, ast.Var, idents...)
	p.resolve(typ)

	return field
}
   756  
   757  func (p *parser) parseStructType() *ast.StructType {
   758  	if p.trace {
   759  		defer un(trace(p, "StructType"))
   760  	}
   761  
   762  	pos := p.expect(token.STRUCT)
   763  	lbrace := p.expect(token.LBRACE)
   764  	scope := ast.NewScope(nil) // struct scope
   765  	var list []*ast.Field
   766  	for p.tok == token.IDENT || p.tok == token.MUL || p.tok == token.LPAREN {
   767  		// a field declaration cannot start with a '(' but we accept
   768  		// it here for more robust parsing and better error messages
   769  		// (parseFieldDecl will check and complain if necessary)
   770  		list = append(list, p.parseFieldDecl(scope))
   771  	}
   772  	rbrace := p.expect(token.RBRACE)
   773  
   774  	return &ast.StructType{
   775  		Struct: pos,
   776  		Fields: &ast.FieldList{
   777  			Opening: lbrace,
   778  			List:    list,
   779  			Closing: rbrace,
   780  		},
   781  	}
   782  }
   783  
   784  func (p *parser) parsePointerType() *ast.StarExpr {
   785  	if p.trace {
   786  		defer un(trace(p, "PointerType"))
   787  	}
   788  
   789  	star := p.expect(token.MUL)
   790  	base := p.parseType()
   791  
   792  	return &ast.StarExpr{Star: star, X: base}
   793  }
   794  
// tryVarType parses a type in a variable/field declaration context and
// returns nil if no type is present. In parameter lists (isParam set)
// it also accepts a "..." prefix for variadic parameters. If the
// result is an identifier, it is not resolved.
func (p *parser) tryVarType(isParam bool) ast.Expr {
	if isParam && p.tok == token.ELLIPSIS {
		pos := p.pos
		p.next()
		typ := p.tryIdentOrType() // don't use parseType so we can provide better error message
		if typ != nil {
			p.resolve(typ)
		} else {
			p.error(pos, "'...' parameter is missing type")
			typ = &ast.BadExpr{From: pos, To: p.pos}
		}
		return &ast.Ellipsis{Ellipsis: pos, Elt: typ}
	}
	return p.tryIdentOrType()
}
   811  
   812  // If the result is an identifier, it is not resolved.
   813  func (p *parser) parseVarType(isParam bool) ast.Expr {
   814  	typ := p.tryVarType(isParam)
   815  	if typ == nil {
   816  		pos := p.pos
   817  		p.errorExpected(pos, "type")
   818  		p.next() // make progress
   819  		typ = &ast.BadExpr{From: pos, To: p.pos}
   820  	}
   821  	return typ
   822  }
   823  
// parseParameterList parses the contents of a parenthesized parameter
// list. A leading list of identifiers is indistinguishable from a list
// of type names until the token following it is seen, so the first
// declaration is parsed as a list of types and reinterpreted once the
// following token is known. Parameters are declared in the given scope.
func (p *parser) parseParameterList(scope *ast.Scope, ellipsisOk bool) (params []*ast.Field) {
	if p.trace {
		defer un(trace(p, "ParameterList"))
	}

	// 1st ParameterDecl
	// A list of identifiers looks like a list of type names.
	var list []ast.Expr
	for {
		list = append(list, p.parseVarType(ellipsisOk))
		if p.tok != token.COMMA {
			break
		}
		p.next()
		if p.tok == token.RPAREN {
			break
		}
	}

	// analyze case
	if typ := p.tryVarType(ellipsisOk); typ != nil {
		// IdentifierList Type
		idents := p.makeIdentList(list)
		field := &ast.Field{Names: idents, Type: typ}
		params = append(params, field)
		// Go spec: The scope of an identifier denoting a function
		// parameter or result variable is the function body.
		p.declare(field, nil, scope, ast.Var, idents...)
		p.resolve(typ)
		if !p.atComma("parameter list", token.RPAREN) {
			return
		}
		p.next()
		for p.tok != token.RPAREN && p.tok != token.EOF {
			idents := p.parseIdentList()
			typ := p.parseVarType(ellipsisOk)
			field := &ast.Field{Names: idents, Type: typ}
			params = append(params, field)
			// Go spec: The scope of an identifier denoting a function
			// parameter or result variable is the function body.
			p.declare(field, nil, scope, ast.Var, idents...)
			p.resolve(typ)
			if !p.atComma("parameter list", token.RPAREN) {
				break
			}
			p.next()
		}
		return
	}

	// Type { "," Type } (anonymous parameters)
	params = make([]*ast.Field, len(list))
	for i, typ := range list {
		p.resolve(typ)
		params[i] = &ast.Field{Type: typ}
	}
	return
}
   882  
   883  func (p *parser) parseParameters(scope *ast.Scope, ellipsisOk bool) *ast.FieldList {
   884  	if p.trace {
   885  		defer un(trace(p, "Parameters"))
   886  	}
   887  
   888  	var params []*ast.Field
   889  	lparen := p.expect(token.LPAREN)
   890  	if p.tok != token.RPAREN {
   891  		params = p.parseParameterList(scope, ellipsisOk)
   892  	}
   893  	rparen := p.expect(token.RPAREN)
   894  
   895  	return &ast.FieldList{Opening: lparen, List: params, Closing: rparen}
   896  }
   897  
   898  func (p *parser) parseResult(scope *ast.Scope) *ast.FieldList {
   899  	if p.trace {
   900  		defer un(trace(p, "Result"))
   901  	}
   902  
   903  	if p.tok == token.LPAREN {
   904  		return p.parseParameters(scope, false)
   905  	}
   906  
   907  	typ := p.tryType()
   908  	if typ != nil {
   909  		list := make([]*ast.Field, 1)
   910  		list[0] = &ast.Field{Type: typ}
   911  		return &ast.FieldList{List: list}
   912  	}
   913  
   914  	return nil
   915  }
   916  
   917  func (p *parser) parseSignature(scope *ast.Scope) (params, results *ast.FieldList) {
   918  	if p.trace {
   919  		defer un(trace(p, "Signature"))
   920  	}
   921  
   922  	params = p.parseParameters(scope, true)
   923  	results = p.parseResult(scope)
   924  
   925  	return
   926  }
   927  
   928  func (p *parser) parseFuncType() (*ast.FuncType, *ast.Scope) {
   929  	if p.trace {
   930  		defer un(trace(p, "FuncType"))
   931  	}
   932  
   933  	pos := p.expect(token.FUNC)
   934  	scope := ast.NewScope(p.topScope) // function scope
   935  	params, results := p.parseSignature(scope)
   936  
   937  	return &ast.FuncType{Func: pos, Params: params, Results: results}, scope
   938  }
   939  
// parseMethodSpec parses a single entry of an interface body: either a
// method (identifier followed by a signature) or an embedded interface
// type. The resulting field is declared in the given interface scope.
func (p *parser) parseMethodSpec(scope *ast.Scope) *ast.Field {
	if p.trace {
		defer un(trace(p, "MethodSpec"))
	}

	doc := p.leadComment
	var idents []*ast.Ident
	var typ ast.Expr
	x := p.parseTypeName()
	if ident, isIdent := x.(*ast.Ident); isIdent && p.tok == token.LPAREN {
		// method
		idents = []*ast.Ident{ident}
		// Note: this deliberately shadows the interface scope parameter;
		// method parameters/results live in their own scope.
		scope := ast.NewScope(nil) // method scope
		params, results := p.parseSignature(scope)
		typ = &ast.FuncType{Func: token.NoPos, Params: params, Results: results}
	} else {
		// embedded interface
		typ = x
		p.resolve(typ)
	}
	p.expectSemi() // call before accessing p.lineComment

	spec := &ast.Field{Doc: doc, Names: idents, Type: typ, Comment: p.lineComment}
	p.declare(spec, nil, scope, ast.Fun, idents...)

	return spec
}
   967  
   968  func (p *parser) parseInterfaceType() *ast.InterfaceType {
   969  	if p.trace {
   970  		defer un(trace(p, "InterfaceType"))
   971  	}
   972  
   973  	pos := p.expect(token.INTERFACE)
   974  	lbrace := p.expect(token.LBRACE)
   975  	scope := ast.NewScope(nil) // interface scope
   976  	var list []*ast.Field
   977  	for p.tok == token.IDENT {
   978  		list = append(list, p.parseMethodSpec(scope))
   979  	}
   980  	rbrace := p.expect(token.RBRACE)
   981  
   982  	return &ast.InterfaceType{
   983  		Interface: pos,
   984  		Methods: &ast.FieldList{
   985  			Opening: lbrace,
   986  			List:    list,
   987  			Closing: rbrace,
   988  		},
   989  	}
   990  }
   991  
   992  func (p *parser) parseMapType() *ast.MapType {
   993  	if p.trace {
   994  		defer un(trace(p, "MapType"))
   995  	}
   996  
   997  	pos := p.expect(token.MAP)
   998  	p.expect(token.LBRACK)
   999  	key := p.parseType()
  1000  	p.expect(token.RBRACK)
  1001  	value := p.parseType()
  1002  
  1003  	return &ast.MapType{Map: pos, Key: key, Value: value}
  1004  }
  1005  
  1006  func (p *parser) parseChanType() *ast.ChanType {
  1007  	if p.trace {
  1008  		defer un(trace(p, "ChanType"))
  1009  	}
  1010  
  1011  	pos := p.pos
  1012  	dir := ast.SEND | ast.RECV
  1013  	var arrow token.Pos
  1014  	if p.tok == token.CHAN {
  1015  		p.next()
  1016  		if p.tok == token.ARROW {
  1017  			arrow = p.pos
  1018  			p.next()
  1019  			dir = ast.SEND
  1020  		}
  1021  	} else {
  1022  		arrow = p.expect(token.ARROW)
  1023  		p.expect(token.CHAN)
  1024  		dir = ast.RECV
  1025  	}
  1026  	value := p.parseType()
  1027  
  1028  	return &ast.ChanType{Begin: pos, Arrow: arrow, Dir: dir, Value: value}
  1029  }
  1030  
// tryIdentOrType parses a type if one starts at the current token and
// returns nil otherwise (without reporting an error).
// If the result is an identifier, it is not resolved.
func (p *parser) tryIdentOrType() ast.Expr {
	switch p.tok {
	case token.IDENT:
		return p.parseTypeName()
	case token.LBRACK:
		return p.parseArrayType()
	case token.STRUCT:
		return p.parseStructType()
	case token.MUL:
		return p.parsePointerType()
	case token.FUNC:
		typ, _ := p.parseFuncType() // the function scope is not needed here
		return typ
	case token.INTERFACE:
		return p.parseInterfaceType()
	case token.MAP:
		return p.parseMapType()
	case token.CHAN, token.ARROW:
		return p.parseChanType()
	case token.LPAREN:
		// parenthesized type
		lparen := p.pos
		p.next()
		typ := p.parseType()
		rparen := p.expect(token.RPAREN)
		return &ast.ParenExpr{Lparen: lparen, X: typ, Rparen: rparen}
	}

	// no type found
	return nil
}
  1062  
  1063  func (p *parser) tryType() ast.Expr {
  1064  	typ := p.tryIdentOrType()
  1065  	if typ != nil {
  1066  		p.resolve(typ)
  1067  	}
  1068  	return typ
  1069  }
  1070  
  1071  // ----------------------------------------------------------------------------
  1072  // Blocks
  1073  
  1074  func (p *parser) parseStmtList() (list []ast.Stmt) {
  1075  	if p.trace {
  1076  		defer un(trace(p, "StatementList"))
  1077  	}
  1078  
  1079  	for p.tok != token.CASE && p.tok != token.DEFAULT && p.tok != token.RBRACE && p.tok != token.EOF {
  1080  		list = append(list, p.parseStmt())
  1081  	}
  1082  
  1083  	return
  1084  }
  1085  
  1086  func (p *parser) parseBody(scope *ast.Scope) *ast.BlockStmt {
  1087  	if p.trace {
  1088  		defer un(trace(p, "Body"))
  1089  	}
  1090  
  1091  	lbrace := p.expect(token.LBRACE)
  1092  	p.topScope = scope // open function scope
  1093  	p.openLabelScope()
  1094  	list := p.parseStmtList()
  1095  	p.closeLabelScope()
  1096  	p.closeScope()
  1097  	rbrace := p.expect2(token.RBRACE)
  1098  
  1099  	return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
  1100  }
  1101  
  1102  func (p *parser) parseBlockStmt() *ast.BlockStmt {
  1103  	if p.trace {
  1104  		defer un(trace(p, "BlockStmt"))
  1105  	}
  1106  
  1107  	lbrace := p.expect(token.LBRACE)
  1108  	p.openScope()
  1109  	list := p.parseStmtList()
  1110  	p.closeScope()
  1111  	rbrace := p.expect2(token.RBRACE)
  1112  
  1113  	return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
  1114  }
  1115  
  1116  // ----------------------------------------------------------------------------
  1117  // Expressions
  1118  
  1119  func (p *parser) parseFuncTypeOrLit() ast.Expr {
  1120  	if p.trace {
  1121  		defer un(trace(p, "FuncTypeOrLit"))
  1122  	}
  1123  
  1124  	typ, scope := p.parseFuncType()
  1125  	if p.tok != token.LBRACE {
  1126  		// function type only
  1127  		return typ
  1128  	}
  1129  
  1130  	p.exprLev++
  1131  	body := p.parseBody(scope)
  1132  	p.exprLev--
  1133  
  1134  	return &ast.FuncLit{Type: typ, Body: body}
  1135  }
  1136  
// parseOperand may return an expression or a raw type (incl. array
// types of the form [...]T. Callers must verify the result.
// If lhs is set and the result is an identifier, it is not resolved.
//
func (p *parser) parseOperand(lhs bool) ast.Expr {
	if p.trace {
		defer un(trace(p, "Operand"))
	}

	switch p.tok {
	case token.IDENT:
		x := p.parseIdent()
		if !lhs {
			// lhs identifiers are resolved later (they may be newly
			// declared by the enclosing statement).
			p.resolve(x)
		}
		return x

	case token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING:
		// basic literal
		x := &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
		p.next()
		return x

	case token.LPAREN:
		// parenthesized expression or type
		lparen := p.pos
		p.next()
		p.exprLev++
		x := p.parseRhsOrType() // types may be parenthesized: (some type)
		p.exprLev--
		rparen := p.expect(token.RPAREN)
		return &ast.ParenExpr{Lparen: lparen, X: x, Rparen: rparen}

	case token.FUNC:
		// function type or function literal
		return p.parseFuncTypeOrLit()
	}

	if typ := p.tryIdentOrType(); typ != nil {
		// could be type for composite literal or conversion
		_, isIdent := typ.(*ast.Ident)
		assert(!isIdent, "type cannot be identifier") // identifiers are handled by the IDENT case above
		return typ
	}

	// we have an error
	pos := p.pos
	p.errorExpected(pos, "operand")
	p.advance(stmtStart) // skip ahead so parsing can continue
	return &ast.BadExpr{From: pos, To: p.pos}
}
  1185  
  1186  func (p *parser) parseSelector(x ast.Expr) ast.Expr {
  1187  	if p.trace {
  1188  		defer un(trace(p, "Selector"))
  1189  	}
  1190  
  1191  	sel := p.parseIdent()
  1192  
  1193  	return &ast.SelectorExpr{X: x, Sel: sel}
  1194  }
  1195  
  1196  func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr {
  1197  	if p.trace {
  1198  		defer un(trace(p, "TypeAssertion"))
  1199  	}
  1200  
  1201  	lparen := p.expect(token.LPAREN)
  1202  	var typ ast.Expr
  1203  	if p.tok == token.TYPE {
  1204  		// type switch: typ == nil
  1205  		p.next()
  1206  	} else {
  1207  		typ = p.parseType()
  1208  	}
  1209  	rparen := p.expect(token.RPAREN)
  1210  
  1211  	return &ast.TypeAssertExpr{X: x, Type: typ, Lparen: lparen, Rparen: rparen}
  1212  }
  1213  
// parseIndexOrSlice parses an index expression x[i] or a slice
// expression x[low:high] / x[low:high:max] for the already-parsed
// operand x. The current token must be the opening '['.
func (p *parser) parseIndexOrSlice(x ast.Expr) ast.Expr {
	if p.trace {
		defer un(trace(p, "IndexOrSlice"))
	}

	const N = 3 // change the 3 to 2 to disable 3-index slices
	lbrack := p.expect(token.LBRACK)
	p.exprLev++
	var index [N]ast.Expr // index[0] is the index (or low bound); index[1], index[2] are high and max
	var colons [N - 1]token.Pos
	if p.tok != token.COLON {
		index[0] = p.parseRhs()
	}
	// Collect up to N-1 colons and the (optional) expressions between them.
	ncolons := 0
	for p.tok == token.COLON && ncolons < len(colons) {
		colons[ncolons] = p.pos
		ncolons++
		p.next()
		if p.tok != token.COLON && p.tok != token.RBRACK && p.tok != token.EOF {
			index[ncolons] = p.parseRhs()
		}
	}
	p.exprLev--
	rbrack := p.expect(token.RBRACK)

	if ncolons > 0 {
		// slice expression
		slice3 := false
		if ncolons == 2 {
			slice3 = true
			// Check presence of 2nd and 3rd index here rather than during type-checking
			// to prevent erroneous programs from passing through gofmt (was issue 7305).
			if index[1] == nil {
				p.error(colons[0], "2nd index required in 3-index slice")
				index[1] = &ast.BadExpr{From: colons[0] + 1, To: colons[1]}
			}
			if index[2] == nil {
				p.error(colons[1], "3rd index required in 3-index slice")
				index[2] = &ast.BadExpr{From: colons[1] + 1, To: rbrack}
			}
		}
		return &ast.SliceExpr{X: x, Lbrack: lbrack, Low: index[0], High: index[1], Max: index[2], Slice3: slice3, Rbrack: rbrack}
	}

	// no colons seen: plain index expression
	return &ast.IndexExpr{X: x, Lbrack: lbrack, Index: index[0], Rbrack: rbrack}
}
  1260  
// parseCallOrConversion parses the parenthesized argument list of a
// call or conversion expression; fun is the callee (or conversion
// target type) already parsed by the caller.
func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr {
	if p.trace {
		defer un(trace(p, "CallOrConversion"))
	}

	lparen := p.expect(token.LPAREN)
	p.exprLev++
	var list []ast.Expr
	var ellipsis token.Pos
	// Stop at ')', EOF, or after a "..." (which must follow the last argument).
	for p.tok != token.RPAREN && p.tok != token.EOF && !ellipsis.IsValid() {
		list = append(list, p.parseRhsOrType()) // builtins may expect a type: make(some type, ...)
		if p.tok == token.ELLIPSIS {
			ellipsis = p.pos
			p.next()
		}
		if !p.atComma("argument list", token.RPAREN) {
			break
		}
		p.next()
	}
	p.exprLev--
	rparen := p.expectClosing(token.RPAREN, "argument list")

	return &ast.CallExpr{Fun: fun, Lparen: lparen, Args: list, Ellipsis: ellipsis, Rparen: rparen}
}
  1286  
  1287  func (p *parser) parseValue(keyOk bool) ast.Expr {
  1288  	if p.trace {
  1289  		defer un(trace(p, "Element"))
  1290  	}
  1291  
  1292  	if p.tok == token.LBRACE {
  1293  		return p.parseLiteralValue(nil)
  1294  	}
  1295  
  1296  	// Because the parser doesn't know the composite literal type, it cannot
  1297  	// know if a key that's an identifier is a struct field name or a name
  1298  	// denoting a value. The former is not resolved by the parser or the
  1299  	// resolver.
  1300  	//
  1301  	// Instead, _try_ to resolve such a key if possible. If it resolves,
  1302  	// it a) has correctly resolved, or b) incorrectly resolved because
  1303  	// the key is a struct field with a name matching another identifier.
  1304  	// In the former case we are done, and in the latter case we don't
  1305  	// care because the type checker will do a separate field lookup.
  1306  	//
  1307  	// If the key does not resolve, it a) must be defined at the top
  1308  	// level in another file of the same package, the universe scope, or be
  1309  	// undeclared; or b) it is a struct field. In the former case, the type
  1310  	// checker can do a top-level lookup, and in the latter case it will do
  1311  	// a separate field lookup.
  1312  	x := p.checkExpr(p.parseExpr(keyOk))
  1313  	if keyOk {
  1314  		if p.tok == token.COLON {
  1315  			// Try to resolve the key but don't collect it
  1316  			// as unresolved identifier if it fails so that
  1317  			// we don't get (possibly false) errors about
  1318  			// undeclared names.
  1319  			p.tryResolve(x, false)
  1320  		} else {
  1321  			// not a key
  1322  			p.resolve(x)
  1323  		}
  1324  	}
  1325  
  1326  	return x
  1327  }
  1328  
  1329  func (p *parser) parseElement() ast.Expr {
  1330  	if p.trace {
  1331  		defer un(trace(p, "Element"))
  1332  	}
  1333  
  1334  	x := p.parseValue(true)
  1335  	if p.tok == token.COLON {
  1336  		colon := p.pos
  1337  		p.next()
  1338  		x = &ast.KeyValueExpr{Key: x, Colon: colon, Value: p.parseValue(false)}
  1339  	}
  1340  
  1341  	return x
  1342  }
  1343  
  1344  func (p *parser) parseElementList() (list []ast.Expr) {
  1345  	if p.trace {
  1346  		defer un(trace(p, "ElementList"))
  1347  	}
  1348  
  1349  	for p.tok != token.RBRACE && p.tok != token.EOF {
  1350  		list = append(list, p.parseElement())
  1351  		if !p.atComma("composite literal", token.RBRACE) {
  1352  			break
  1353  		}
  1354  		p.next()
  1355  	}
  1356  
  1357  	return
  1358  }
  1359  
  1360  func (p *parser) parseLiteralValue(typ ast.Expr) ast.Expr {
  1361  	if p.trace {
  1362  		defer un(trace(p, "LiteralValue"))
  1363  	}
  1364  
  1365  	lbrace := p.expect(token.LBRACE)
  1366  	var elts []ast.Expr
  1367  	p.exprLev++
  1368  	if p.tok != token.RBRACE {
  1369  		elts = p.parseElementList()
  1370  	}
  1371  	p.exprLev--
  1372  	rbrace := p.expectClosing(token.RBRACE, "composite literal")
  1373  	return &ast.CompositeLit{Type: typ, Lbrace: lbrace, Elts: elts, Rbrace: rbrace}
  1374  }
  1375  
// checkExpr checks that x is an expression (and not a type). If it is
// not, an error is reported and a BadExpr covering x is returned.
func (p *parser) checkExpr(x ast.Expr) ast.Expr {
	// The empty cases are intentional: those node kinds are valid
	// expressions and pass through unchanged.
	switch unparen(x).(type) {
	case *ast.BadExpr:
	case *ast.Ident:
	case *ast.BasicLit:
	case *ast.FuncLit:
	case *ast.CompositeLit:
	case *ast.ParenExpr:
		panic("unreachable") // unparen has stripped all ParenExprs
	case *ast.SelectorExpr:
	case *ast.IndexExpr:
	case *ast.SliceExpr:
	case *ast.TypeAssertExpr:
		// If t.Type == nil we have a type assertion of the form
		// y.(type), which is only allowed in type switch expressions.
		// It's hard to exclude those but for the case where we are in
		// a type switch. Instead be lenient and test this in the type
		// checker.
	case *ast.CallExpr:
	case *ast.StarExpr:
	case *ast.UnaryExpr:
	case *ast.BinaryExpr:
	default:
		// all other nodes are not proper expressions
		p.errorExpected(x.Pos(), "expression")
		x = &ast.BadExpr{From: x.Pos(), To: p.safePos(x.End())}
	}
	return x
}
  1406  
  1407  // isTypeName reports whether x is a (qualified) TypeName.
  1408  func isTypeName(x ast.Expr) bool {
  1409  	switch t := x.(type) {
  1410  	case *ast.BadExpr:
  1411  	case *ast.Ident:
  1412  	case *ast.SelectorExpr:
  1413  		_, isIdent := t.X.(*ast.Ident)
  1414  		return isIdent
  1415  	default:
  1416  		return false // all other nodes are not type names
  1417  	}
  1418  	return true
  1419  }
  1420  
  1421  // isLiteralType reports whether x is a legal composite literal type.
  1422  func isLiteralType(x ast.Expr) bool {
  1423  	switch t := x.(type) {
  1424  	case *ast.BadExpr:
  1425  	case *ast.Ident:
  1426  	case *ast.SelectorExpr:
  1427  		_, isIdent := t.X.(*ast.Ident)
  1428  		return isIdent
  1429  	case *ast.ArrayType:
  1430  	case *ast.StructType:
  1431  	case *ast.MapType:
  1432  	default:
  1433  		return false // all other nodes are not legal composite literal types
  1434  	}
  1435  	return true
  1436  }
  1437  
  1438  // If x is of the form *T, deref returns T, otherwise it returns x.
  1439  func deref(x ast.Expr) ast.Expr {
  1440  	if p, isPtr := x.(*ast.StarExpr); isPtr {
  1441  		x = p.X
  1442  	}
  1443  	return x
  1444  }
  1445  
  1446  // If x is of the form (T), unparen returns unparen(T), otherwise it returns x.
  1447  func unparen(x ast.Expr) ast.Expr {
  1448  	if p, isParen := x.(*ast.ParenExpr); isParen {
  1449  		x = unparen(p.X)
  1450  	}
  1451  	return x
  1452  }
  1453  
  1454  // checkExprOrType checks that x is an expression or a type
  1455  // (and not a raw type such as [...]T).
  1456  //
  1457  func (p *parser) checkExprOrType(x ast.Expr) ast.Expr {
  1458  	switch t := unparen(x).(type) {
  1459  	case *ast.ParenExpr:
  1460  		panic("unreachable")
  1461  	case *ast.ArrayType:
  1462  		if len, isEllipsis := t.Len.(*ast.Ellipsis); isEllipsis {
  1463  			p.error(len.Pos(), "expected array length, found '...'")
  1464  			x = &ast.BadExpr{From: x.Pos(), To: p.safePos(x.End())}
  1465  		}
  1466  	}
  1467  
  1468  	// all other nodes are expressions or types
  1469  	return x
  1470  }
  1471  
// parsePrimaryExpr parses a primary expression: an operand followed by
// any number of selectors, type assertions, index/slice expressions,
// calls/conversions, and composite literal values.
// If lhs is set and the result is an identifier, it is not resolved.
func (p *parser) parsePrimaryExpr(lhs bool) ast.Expr {
	if p.trace {
		defer un(trace(p, "PrimaryExpr"))
	}

	x := p.parseOperand(lhs)
L:
	for {
		switch p.tok {
		case token.PERIOD:
			p.next()
			if lhs {
				p.resolve(x)
			}
			switch p.tok {
			case token.IDENT:
				x = p.parseSelector(p.checkExprOrType(x))
			case token.LPAREN:
				x = p.parseTypeAssertion(p.checkExpr(x))
			default:
				pos := p.pos
				p.errorExpected(pos, "selector or type assertion")
				p.next() // make progress
				sel := &ast.Ident{NamePos: pos, Name: "_"}
				x = &ast.SelectorExpr{X: x, Sel: sel}
			}
		case token.LBRACK:
			if lhs {
				p.resolve(x)
			}
			x = p.parseIndexOrSlice(p.checkExpr(x))
		case token.LPAREN:
			if lhs {
				p.resolve(x)
			}
			x = p.parseCallOrConversion(p.checkExprOrType(x))
		case token.LBRACE:
			// Only treat '{' as the start of a composite literal if x
			// can be a literal type; a plain type name is only accepted
			// outside control-clause contexts (p.exprLev >= 0) to
			// disambiguate from a following block.
			if isLiteralType(x) && (p.exprLev >= 0 || !isTypeName(x)) {
				if lhs {
					p.resolve(x)
				}
				x = p.parseLiteralValue(x)
			} else {
				break L
			}
		default:
			break L
		}
		lhs = false // no need to try to resolve again
	}

	return x
}
  1526  
// parseUnaryExpr parses a unary expression, handling the unary
// operators, the ambiguous "<-" (receive operation vs. channel type),
// and the ambiguous "*" (deref vs. pointer type).
// If lhs is set and the result is an identifier, it is not resolved.
func (p *parser) parseUnaryExpr(lhs bool) ast.Expr {
	if p.trace {
		defer un(trace(p, "UnaryExpr"))
	}

	switch p.tok {
	case token.ADD, token.SUB, token.NOT, token.XOR, token.AND:
		pos, op := p.pos, p.tok
		p.next()
		x := p.parseUnaryExpr(false)
		return &ast.UnaryExpr{OpPos: pos, Op: op, X: p.checkExpr(x)}

	case token.ARROW:
		// channel type or receive expression
		arrow := p.pos
		p.next()

		// If the next token is token.CHAN we still don't know if it
		// is a channel type or a receive operation - we only know
		// once we have found the end of the unary expression. There
		// are two cases:
		//
		//   <- type  => (<-type) must be channel type
		//   <- expr  => <-(expr) is a receive from an expression
		//
		// In the first case, the arrow must be re-associated with
		// the channel type parsed already:
		//
		//   <- (chan type)    =>  (<-chan type)
		//   <- (chan<- type)  =>  (<-chan (<-type))

		x := p.parseUnaryExpr(false)

		// determine which case we have
		if typ, ok := x.(*ast.ChanType); ok {
			// (<-type)

			// re-associate position info and <-
			// Each loop iteration pushes the leading arrow one channel
			// level deeper, stopping at the first non-send channel.
			dir := ast.SEND
			for ok && dir == ast.SEND {
				if typ.Dir == ast.RECV {
					// error: (<-type) is (<-(<-chan T))
					p.errorExpected(typ.Arrow, "'chan'")
				}
				arrow, typ.Begin, typ.Arrow = typ.Arrow, arrow, arrow
				dir, typ.Dir = typ.Dir, ast.RECV
				typ, ok = typ.Value.(*ast.ChanType)
			}
			if dir == ast.SEND {
				p.errorExpected(arrow, "channel type")
			}

			return x
		}

		// <-(expr)
		return &ast.UnaryExpr{OpPos: arrow, Op: token.ARROW, X: p.checkExpr(x)}

	case token.MUL:
		// pointer type or unary "*" expression
		pos := p.pos
		p.next()
		x := p.parseUnaryExpr(false)
		return &ast.StarExpr{Star: pos, X: p.checkExprOrType(x)}
	}

	return p.parsePrimaryExpr(lhs)
}
  1596  
  1597  func (p *parser) tokPrec() (token.Token, int) {
  1598  	tok := p.tok
  1599  	if p.inRhs && tok == token.ASSIGN {
  1600  		tok = token.EQL
  1601  	}
  1602  	return tok, tok.Precedence()
  1603  }
  1604  
// parseBinaryExpr parses a binary expression using precedence climbing:
// it consumes operators whose precedence is at least prec1, recursing
// with a higher minimum precedence for the right operand so that
// operators associate to the left.
// If lhs is set and the result is an identifier, it is not resolved.
func (p *parser) parseBinaryExpr(lhs bool, prec1 int) ast.Expr {
	if p.trace {
		defer un(trace(p, "BinaryExpr"))
	}

	x := p.parseUnaryExpr(lhs)
	for {
		op, oprec := p.tokPrec()
		if oprec < prec1 {
			// current operator binds less tightly; let the caller have it
			return x
		}
		pos := p.expect(op)
		if lhs {
			// x is used as an operand, so it must be resolved now
			p.resolve(x)
			lhs = false
		}
		y := p.parseBinaryExpr(false, oprec+1)
		x = &ast.BinaryExpr{X: p.checkExpr(x), OpPos: pos, Op: op, Y: p.checkExpr(y)}
	}
}
  1626  
  1627  // If lhs is set and the result is an identifier, it is not resolved.
  1628  // The result may be a type or even a raw type ([...]int). Callers must
  1629  // check the result (using checkExpr or checkExprOrType), depending on
  1630  // context.
  1631  func (p *parser) parseExpr(lhs bool) ast.Expr {
  1632  	if p.trace {
  1633  		defer un(trace(p, "Expression"))
  1634  	}
  1635  
  1636  	return p.parseBinaryExpr(lhs, token.LowestPrec+1)
  1637  }
  1638  
  1639  func (p *parser) parseRhs() ast.Expr {
  1640  	old := p.inRhs
  1641  	p.inRhs = true
  1642  	x := p.checkExpr(p.parseExpr(false))
  1643  	p.inRhs = old
  1644  	return x
  1645  }
  1646  
  1647  func (p *parser) parseRhsOrType() ast.Expr {
  1648  	old := p.inRhs
  1649  	p.inRhs = true
  1650  	x := p.checkExprOrType(p.parseExpr(false))
  1651  	p.inRhs = old
  1652  	return x
  1653  }
  1654  
  1655  // ----------------------------------------------------------------------------
  1656  // Statements
  1657  
// Parsing modes for parseSimpleStmt.
const (
	// basic: neither a label declaration nor a range clause is permitted.
	basic = iota
	// labelOk: a label declaration ("name:") is permitted.
	labelOk
	// rangeOk: a range clause is permitted (for statement headers).
	rangeOk
)
  1664  
// parseSimpleStmt returns true as 2nd result if it parsed the assignment
// of a range clause (with mode == rangeOk). The returned statement is an
// assignment with a right-hand side that is a single unary expression of
// the form "range x". No guarantees are given for the left-hand side.
func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) {
	if p.trace {
		defer un(trace(p, "SimpleStmt"))
	}

	x := p.parseLhsList()

	switch p.tok {
	case
		token.DEFINE, token.ASSIGN, token.ADD_ASSIGN,
		token.SUB_ASSIGN, token.MUL_ASSIGN, token.QUO_ASSIGN,
		token.REM_ASSIGN, token.AND_ASSIGN, token.OR_ASSIGN,
		token.XOR_ASSIGN, token.SHL_ASSIGN, token.SHR_ASSIGN, token.AND_NOT_ASSIGN:
		// assignment statement, possibly part of a range clause
		pos, tok := p.pos, p.tok
		p.next()
		var y []ast.Expr
		isRange := false
		// A range clause is only valid after '=' or ':=' when the
		// caller allows it (for statement headers).
		if mode == rangeOk && p.tok == token.RANGE && (tok == token.DEFINE || tok == token.ASSIGN) {
			pos := p.pos
			p.next()
			// the range expression is wrapped in a unary "range x"
			y = []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
			isRange = true
		} else {
			y = p.parseRhsList()
		}
		as := &ast.AssignStmt{Lhs: x, TokPos: pos, Tok: tok, Rhs: y}
		if tok == token.DEFINE {
			// ':=' declares the left-hand-side identifiers
			p.shortVarDecl(as, x)
		}
		return as, isRange
	}

	// Everything below expects a single expression on the left.
	if len(x) > 1 {
		p.errorExpected(x[0].Pos(), "1 expression")
		// continue with first expression
	}

	switch p.tok {
	case token.COLON:
		// labeled statement
		colon := p.pos
		p.next()
		if label, isIdent := x[0].(*ast.Ident); mode == labelOk && isIdent {
			// Go spec: The scope of a label is the body of the function
			// in which it is declared and excludes the body of any nested
			// function.
			stmt := &ast.LabeledStmt{Label: label, Colon: colon, Stmt: p.parseStmt()}
			p.declare(stmt, nil, p.labelScope, ast.Lbl, label)
			return stmt, false
		}
		// The label declaration typically starts at x[0].Pos(), but the label
		// declaration may be erroneous due to a token after that position (and
		// before the ':'). If SpuriousErrors is not set, the (only) error
		// reported for the line is the illegal label error instead of the token
		// before the ':' that caused the problem. Thus, use the (latest) colon
		// position for error reporting.
		p.error(colon, "illegal label declaration")
		return &ast.BadStmt{From: x[0].Pos(), To: colon + 1}, false

	case token.ARROW:
		// send statement
		arrow := p.pos
		p.next()
		y := p.parseRhs()
		return &ast.SendStmt{Chan: x[0], Arrow: arrow, Value: y}, false

	case token.INC, token.DEC:
		// increment or decrement
		s := &ast.IncDecStmt{X: x[0], TokPos: p.pos, Tok: p.tok}
		p.next()
		return s, false
	}

	// expression
	return &ast.ExprStmt{X: x[0]}, false
}
  1746  
  1747  func (p *parser) parseCallExpr(callType string) *ast.CallExpr {
  1748  	x := p.parseRhsOrType() // could be a conversion: (some type)(x)
  1749  	if call, isCall := x.(*ast.CallExpr); isCall {
  1750  		return call
  1751  	}
  1752  	if _, isBad := x.(*ast.BadExpr); !isBad {
  1753  		// only report error if it's a new one
  1754  		p.error(p.safePos(x.End()), fmt.Sprintf("function must be invoked in %s statement", callType))
  1755  	}
  1756  	return nil
  1757  }
  1758  
  1759  func (p *parser) parseGoStmt() ast.Stmt {
  1760  	if p.trace {
  1761  		defer un(trace(p, "GoStmt"))
  1762  	}
  1763  
  1764  	pos := p.expect(token.GO)
  1765  	call := p.parseCallExpr("go")
  1766  	p.expectSemi()
  1767  	if call == nil {
  1768  		return &ast.BadStmt{From: pos, To: pos + 2} // len("go")
  1769  	}
  1770  
  1771  	return &ast.GoStmt{Go: pos, Call: call}
  1772  }
  1773  
  1774  func (p *parser) parseDeferStmt() ast.Stmt {
  1775  	if p.trace {
  1776  		defer un(trace(p, "DeferStmt"))
  1777  	}
  1778  
  1779  	pos := p.expect(token.DEFER)
  1780  	call := p.parseCallExpr("defer")
  1781  	p.expectSemi()
  1782  	if call == nil {
  1783  		return &ast.BadStmt{From: pos, To: pos + 5} // len("defer")
  1784  	}
  1785  
  1786  	return &ast.DeferStmt{Defer: pos, Call: call}
  1787  }
  1788  
  1789  func (p *parser) parseReturnStmt() *ast.ReturnStmt {
  1790  	if p.trace {
  1791  		defer un(trace(p, "ReturnStmt"))
  1792  	}
  1793  
  1794  	pos := p.pos
  1795  	p.expect(token.RETURN)
  1796  	var x []ast.Expr
  1797  	if p.tok != token.SEMICOLON && p.tok != token.RBRACE {
  1798  		x = p.parseRhsList()
  1799  	}
  1800  	p.expectSemi()
  1801  
  1802  	return &ast.ReturnStmt{Return: pos, Results: x}
  1803  }
  1804  
  1805  func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt {
  1806  	if p.trace {
  1807  		defer un(trace(p, "BranchStmt"))
  1808  	}
  1809  
  1810  	pos := p.expect(tok)
  1811  	var label *ast.Ident
  1812  	if tok != token.FALLTHROUGH && p.tok == token.IDENT {
  1813  		label = p.parseIdent()
  1814  		// add to list of unresolved targets
  1815  		n := len(p.targetStack) - 1
  1816  		p.targetStack[n] = append(p.targetStack[n], label)
  1817  	}
  1818  	p.expectSemi()
  1819  
  1820  	return &ast.BranchStmt{TokPos: pos, Tok: tok, Label: label}
  1821  }
  1822  
  1823  func (p *parser) makeExpr(s ast.Stmt, want string) ast.Expr {
  1824  	if s == nil {
  1825  		return nil
  1826  	}
  1827  	if es, isExpr := s.(*ast.ExprStmt); isExpr {
  1828  		return p.checkExpr(es.X)
  1829  	}
  1830  	found := "simple statement"
  1831  	if _, isAss := s.(*ast.AssignStmt); isAss {
  1832  		found = "assignment"
  1833  	}
  1834  	p.error(s.Pos(), fmt.Sprintf("expected %s, found %s (missing parentheses around composite literal?)", want, found))
  1835  	return &ast.BadExpr{From: s.Pos(), To: p.safePos(s.End())}
  1836  }
  1837  
  1838  // parseIfHeader is an adjusted version of parser.header
  1839  // in cmd/compile/internal/syntax/parser.go, which has
  1840  // been tuned for better error handling.
  1841  func (p *parser) parseIfHeader() (init ast.Stmt, cond ast.Expr) {
  1842  	if p.tok == token.LBRACE {
  1843  		p.error(p.pos, "missing condition in if statement")
  1844  		cond = &ast.BadExpr{From: p.pos, To: p.pos}
  1845  		return
  1846  	}
  1847  	// p.tok != token.LBRACE
  1848  
  1849  	outer := p.exprLev
  1850  	p.exprLev = -1
  1851  
  1852  	if p.tok != token.SEMICOLON {
  1853  		// accept potential variable declaration but complain
  1854  		if p.tok == token.VAR {
  1855  			p.next()
  1856  			p.error(p.pos, fmt.Sprintf("var declaration not allowed in 'IF' initializer"))
  1857  		}
  1858  		init, _ = p.parseSimpleStmt(basic)
  1859  	}
  1860  
  1861  	var condStmt ast.Stmt
  1862  	var semi struct {
  1863  		pos token.Pos
  1864  		lit string // ";" or "\n"; valid if pos.IsValid()
  1865  	}
  1866  	if p.tok != token.LBRACE {
  1867  		if p.tok == token.SEMICOLON {
  1868  			semi.pos = p.pos
  1869  			semi.lit = p.lit
  1870  			p.next()
  1871  		} else {
  1872  			p.expect(token.SEMICOLON)
  1873  		}
  1874  		if p.tok != token.LBRACE {
  1875  			condStmt, _ = p.parseSimpleStmt(basic)
  1876  		}
  1877  	} else {
  1878  		condStmt = init
  1879  		init = nil
  1880  	}
  1881  
  1882  	if condStmt != nil {
  1883  		cond = p.makeExpr(condStmt, "boolean expression")
  1884  	} else if semi.pos.IsValid() {
  1885  		if semi.lit == "\n" {
  1886  			p.error(semi.pos, "unexpected newline, expecting { after if clause")
  1887  		} else {
  1888  			p.error(semi.pos, "missing condition in if statement")
  1889  		}
  1890  	}
  1891  
  1892  	// make sure we have a valid AST
  1893  	if cond == nil {
  1894  		cond = &ast.BadExpr{From: p.pos, To: p.pos}
  1895  	}
  1896  
  1897  	p.exprLev = outer
  1898  	return
  1899  }
  1900  
  1901  func (p *parser) parseIfStmt() *ast.IfStmt {
  1902  	if p.trace {
  1903  		defer un(trace(p, "IfStmt"))
  1904  	}
  1905  
  1906  	pos := p.expect(token.IF)
  1907  	p.openScope()
  1908  	defer p.closeScope()
  1909  
  1910  	init, cond := p.parseIfHeader()
  1911  	body := p.parseBlockStmt()
  1912  
  1913  	var else_ ast.Stmt
  1914  	if p.tok == token.ELSE {
  1915  		p.next()
  1916  		switch p.tok {
  1917  		case token.IF:
  1918  			else_ = p.parseIfStmt()
  1919  		case token.LBRACE:
  1920  			else_ = p.parseBlockStmt()
  1921  			p.expectSemi()
  1922  		default:
  1923  			p.errorExpected(p.pos, "if statement or block")
  1924  			else_ = &ast.BadStmt{From: p.pos, To: p.pos}
  1925  		}
  1926  	} else {
  1927  		p.expectSemi()
  1928  	}
  1929  
  1930  	return &ast.IfStmt{If: pos, Init: init, Cond: cond, Body: body, Else: else_}
  1931  }
  1932  
  1933  func (p *parser) parseTypeList() (list []ast.Expr) {
  1934  	if p.trace {
  1935  		defer un(trace(p, "TypeList"))
  1936  	}
  1937  
  1938  	list = append(list, p.parseType())
  1939  	for p.tok == token.COMMA {
  1940  		p.next()
  1941  		list = append(list, p.parseType())
  1942  	}
  1943  
  1944  	return
  1945  }
  1946  
  1947  func (p *parser) parseCaseClause(typeSwitch bool) *ast.CaseClause {
  1948  	if p.trace {
  1949  		defer un(trace(p, "CaseClause"))
  1950  	}
  1951  
  1952  	pos := p.pos
  1953  	var list []ast.Expr
  1954  	if p.tok == token.CASE {
  1955  		p.next()
  1956  		if typeSwitch {
  1957  			list = p.parseTypeList()
  1958  		} else {
  1959  			list = p.parseRhsList()
  1960  		}
  1961  	} else {
  1962  		p.expect(token.DEFAULT)
  1963  	}
  1964  
  1965  	colon := p.expect(token.COLON)
  1966  	p.openScope()
  1967  	body := p.parseStmtList()
  1968  	p.closeScope()
  1969  
  1970  	return &ast.CaseClause{Case: pos, List: list, Colon: colon, Body: body}
  1971  }
  1972  
  1973  func isTypeSwitchAssert(x ast.Expr) bool {
  1974  	a, ok := x.(*ast.TypeAssertExpr)
  1975  	return ok && a.Type == nil
  1976  }
  1977  
// isTypeSwitchGuard reports whether s is a type switch guard:
// either an expression statement x.(type) or a one-to-one
// assignment v := x.(type). As a side effect, it reports an error
// (but still returns true) when '=' is used instead of ':='.
func (p *parser) isTypeSwitchGuard(s ast.Stmt) bool {
	switch t := s.(type) {
	case *ast.ExprStmt:
		// x.(type)
		return isTypeSwitchAssert(t.X)
	case *ast.AssignStmt:
		// v := x.(type)
		// Exactly one variable and one rhs are required.
		if len(t.Lhs) == 1 && len(t.Rhs) == 1 && isTypeSwitchAssert(t.Rhs[0]) {
			switch t.Tok {
			case token.ASSIGN:
				// permit v = x.(type) but complain
				p.error(t.TokPos, "expected ':=', found '='")
				fallthrough
			case token.DEFINE:
				return true
			}
		}
	}
	return false
}
  1998  
// parseSwitchStmt parses a switch statement. Whether the result is an
// expression switch (*ast.SwitchStmt) or a type switch
// (*ast.TypeSwitchStmt) is decided only after the header has been
// parsed, by checking whether the tag is a type switch guard.
func (p *parser) parseSwitchStmt() ast.Stmt {
	if p.trace {
		defer un(trace(p, "SwitchStmt"))
	}

	pos := p.expect(token.SWITCH)
	p.openScope()
	defer p.closeScope()

	// s1 is the optional init statement, s2 the optional tag.
	var s1, s2 ast.Stmt
	if p.tok != token.LBRACE {
		// Suspend expression context (exprLev = -1) while parsing the
		// header; restored below.
		prevLev := p.exprLev
		p.exprLev = -1
		if p.tok != token.SEMICOLON {
			s2, _ = p.parseSimpleStmt(basic)
		}
		if p.tok == token.SEMICOLON {
			// What we parsed so far was the init statement; the tag
			// (if any) follows the semicolon.
			p.next()
			s1 = s2
			s2 = nil
			if p.tok != token.LBRACE {
				// A TypeSwitchGuard may declare a variable in addition
				// to the variable declared in the initial SimpleStmt.
				// Introduce extra scope to avoid redeclaration errors:
				//
				//	switch t := 0; t := x.(T) { ... }
				//
				// (this code is not valid Go because the first t
				// cannot be accessed and thus is never used, the extra
				// scope is needed for the correct error message).
				//
				// If we don't have a type switch, s2 must be an expression.
				// Having the extra nested but empty scope won't affect it.
				p.openScope()
				defer p.closeScope()
				s2, _ = p.parseSimpleStmt(basic)
			}
		}
		p.exprLev = prevLev
	}

	typeSwitch := p.isTypeSwitchGuard(s2)
	lbrace := p.expect(token.LBRACE)
	var list []ast.Stmt
	for p.tok == token.CASE || p.tok == token.DEFAULT {
		list = append(list, p.parseCaseClause(typeSwitch))
	}
	rbrace := p.expect(token.RBRACE)
	p.expectSemi()
	body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}

	if typeSwitch {
		return &ast.TypeSwitchStmt{Switch: pos, Init: s1, Assign: s2, Body: body}
	}

	return &ast.SwitchStmt{Switch: pos, Init: s1, Tag: p.makeExpr(s2, "switch expression"), Body: body}
}
  2056  
// parseCommClause parses one "case" or "default" clause of a select
// statement. The comm statement of a case clause is a send statement
// (ch <- x), a receive with assignment (v[, ok] = <-ch or
// v[, ok] := <-ch), or a bare receive expression (<-ch).
func (p *parser) parseCommClause() *ast.CommClause {
	if p.trace {
		defer un(trace(p, "CommClause"))
	}

	p.openScope() // each clause has its own scope; closed below
	pos := p.pos
	var comm ast.Stmt
	if p.tok == token.CASE {
		p.next()
		lhs := p.parseLhsList()
		if p.tok == token.ARROW {
			// SendStmt
			if len(lhs) > 1 {
				p.errorExpected(lhs[0].Pos(), "1 expression")
				// continue with first expression
			}
			arrow := p.pos
			p.next()
			rhs := p.parseRhs()
			comm = &ast.SendStmt{Chan: lhs[0], Arrow: arrow, Value: rhs}
		} else {
			// RecvStmt
			if tok := p.tok; tok == token.ASSIGN || tok == token.DEFINE {
				// RecvStmt with assignment
				if len(lhs) > 2 {
					p.errorExpected(lhs[0].Pos(), "1 or 2 expressions")
					// continue with first two expressions
					lhs = lhs[0:2]
				}
				pos := p.pos
				p.next()
				rhs := p.parseRhs()
				as := &ast.AssignStmt{Lhs: lhs, TokPos: pos, Tok: tok, Rhs: []ast.Expr{rhs}}
				if tok == token.DEFINE {
					// ':=' declares the lhs variables in this clause's scope
					p.shortVarDecl(as, lhs)
				}
				comm = as
			} else {
				// lhs must be single receive operation
				if len(lhs) > 1 {
					p.errorExpected(lhs[0].Pos(), "1 expression")
					// continue with first expression
				}
				comm = &ast.ExprStmt{X: lhs[0]}
			}
		}
	} else {
		p.expect(token.DEFAULT)
	}

	colon := p.expect(token.COLON)
	body := p.parseStmtList()
	p.closeScope()

	return &ast.CommClause{Case: pos, Comm: comm, Colon: colon, Body: body}
}
  2114  
  2115  func (p *parser) parseSelectStmt() *ast.SelectStmt {
  2116  	if p.trace {
  2117  		defer un(trace(p, "SelectStmt"))
  2118  	}
  2119  
  2120  	pos := p.expect(token.SELECT)
  2121  	lbrace := p.expect(token.LBRACE)
  2122  	var list []ast.Stmt
  2123  	for p.tok == token.CASE || p.tok == token.DEFAULT {
  2124  		list = append(list, p.parseCommClause())
  2125  	}
  2126  	rbrace := p.expect(token.RBRACE)
  2127  	p.expectSemi()
  2128  	body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
  2129  
  2130  	return &ast.SelectStmt{Select: pos, Body: body}
  2131  }
  2132  
// parseForStmt parses a for statement in any of its forms, returning
// either an *ast.ForStmt or, for range clauses, an *ast.RangeStmt.
// A range clause travels through parsing as an assignment whose
// single rhs is a unary RANGE expression and is converted to a
// RangeStmt at the end. On malformed lhs lists an *ast.BadStmt is
// returned instead.
func (p *parser) parseForStmt() ast.Stmt {
	if p.trace {
		defer un(trace(p, "ForStmt"))
	}

	pos := p.expect(token.FOR)
	p.openScope()
	defer p.closeScope()

	// s1 = init, s2 = condition (or range clause), s3 = post statement.
	var s1, s2, s3 ast.Stmt
	var isRange bool
	if p.tok != token.LBRACE {
		// Suspend expression context (exprLev = -1) while parsing the
		// header; restored below.
		prevLev := p.exprLev
		p.exprLev = -1
		if p.tok != token.SEMICOLON {
			if p.tok == token.RANGE {
				// "for range x" (nil lhs in assignment)
				pos := p.pos
				p.next()
				y := []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
				s2 = &ast.AssignStmt{Rhs: y}
				isRange = true
			} else {
				s2, isRange = p.parseSimpleStmt(rangeOk)
			}
		}
		if !isRange && p.tok == token.SEMICOLON {
			// Three-clause form: what we parsed was the init statement.
			p.next()
			s1 = s2
			s2 = nil
			if p.tok != token.SEMICOLON {
				s2, _ = p.parseSimpleStmt(basic)
			}
			p.expectSemi()
			if p.tok != token.LBRACE {
				s3, _ = p.parseSimpleStmt(basic)
			}
		}
		p.exprLev = prevLev
	}

	body := p.parseBlockStmt()
	p.expectSemi()

	if isRange {
		as := s2.(*ast.AssignStmt)
		// check lhs
		var key, value ast.Expr
		switch len(as.Lhs) {
		case 0:
			// nothing to do
		case 1:
			key = as.Lhs[0]
		case 2:
			key, value = as.Lhs[0], as.Lhs[1]
		default:
			p.errorExpected(as.Lhs[len(as.Lhs)-1].Pos(), "at most 2 expressions")
			return &ast.BadStmt{From: pos, To: p.safePos(body.End())}
		}
		// parseSimpleStmt returned a right-hand side that
		// is a single unary expression of the form "range x"
		x := as.Rhs[0].(*ast.UnaryExpr).X
		return &ast.RangeStmt{
			For:    pos,
			Key:    key,
			Value:  value,
			TokPos: as.TokPos,
			Tok:    as.Tok,
			X:      x,
			Body:   body,
		}
	}

	// regular for statement
	return &ast.ForStmt{
		For:  pos,
		Init: s1,
		Cond: p.makeExpr(s2, "boolean or range expression"),
		Post: s3,
		Body: body,
	}
}
  2215  
// parseStmt parses a single statement, dispatching on the current
// token. In case of a syntax error, an *ast.BadStmt spanning the
// skipped source is returned.
func (p *parser) parseStmt() (s ast.Stmt) {
	if p.trace {
		defer un(trace(p, "Statement"))
	}

	switch p.tok {
	case token.CONST, token.TYPE, token.VAR:
		s = &ast.DeclStmt{Decl: p.parseDecl(stmtStart)}
	case
		// tokens that may start an expression
		token.IDENT, token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING, token.FUNC, token.LPAREN, // operands
		token.LBRACK, token.STRUCT, token.MAP, token.CHAN, token.INTERFACE, // composite types
		token.ADD, token.SUB, token.MUL, token.AND, token.XOR, token.ARROW, token.NOT: // unary operators
		s, _ = p.parseSimpleStmt(labelOk)
		// because of the required look-ahead, labeled statements are
		// parsed by parseSimpleStmt - don't expect a semicolon after
		// them
		if _, isLabeledStmt := s.(*ast.LabeledStmt); !isLabeledStmt {
			p.expectSemi()
		}
	case token.GO:
		s = p.parseGoStmt()
	case token.DEFER:
		s = p.parseDeferStmt()
	case token.RETURN:
		s = p.parseReturnStmt()
	case token.BREAK, token.CONTINUE, token.GOTO, token.FALLTHROUGH:
		s = p.parseBranchStmt(p.tok)
	case token.LBRACE:
		s = p.parseBlockStmt()
		p.expectSemi()
	case token.IF:
		s = p.parseIfStmt()
	case token.SWITCH:
		s = p.parseSwitchStmt()
	case token.SELECT:
		s = p.parseSelectStmt()
	case token.FOR:
		s = p.parseForStmt()
	case token.SEMICOLON:
		// Is it ever possible to have an implicit semicolon
		// producing an empty statement in a valid program?
		// (handle correctly anyway)
		s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: p.lit == "\n"}
		p.next()
	case token.RBRACE:
		// a semicolon may be omitted before a closing "}"
		// (the '}' itself is left for the caller to consume)
		s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: true}
	default:
		// no statement found
		pos := p.pos
		p.errorExpected(pos, "statement")
		p.advance(stmtStart)
		s = &ast.BadStmt{From: pos, To: p.pos}
	}

	return
}
  2274  
  2275  // ----------------------------------------------------------------------------
  2276  // Declarations
  2277  
// A parseSpecFunction parses one declaration specification (import,
// constant, type, or variable spec). doc is the spec's leading comment
// group, keyword the declaration keyword (IMPORT, CONST, TYPE, or VAR),
// and iota the spec's index within its parenthesized group.
type parseSpecFunction func(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec
  2279  
  2280  func isValidImport(lit string) bool {
  2281  	const illegalChars = `!"#$%&'()*,:;<=>?[\]^{|}` + "`\uFFFD"
  2282  	s, _ := strconv.Unquote(lit) // go/scanner returns a legal string literal
  2283  	for _, r := range s {
  2284  		if !unicode.IsGraphic(r) || unicode.IsSpace(r) || strings.ContainsRune(illegalChars, r) {
  2285  			return false
  2286  		}
  2287  	}
  2288  	return s != ""
  2289  }
  2290  
// parseImportSpec parses a single import spec: an optional '.' or
// local package name followed by the import path string. It also
// records the spec in p.imports. The keyword and iota arguments of
// the parseSpecFunction signature are unused here.
func (p *parser) parseImportSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
	if p.trace {
		defer un(trace(p, "ImportSpec"))
	}

	var ident *ast.Ident
	switch p.tok {
	case token.PERIOD:
		// dot import: . "path"
		ident = &ast.Ident{NamePos: p.pos, Name: "."}
		p.next()
	case token.IDENT:
		// named import: name "path"
		ident = p.parseIdent()
	}

	pos := p.pos
	var path string
	if p.tok == token.STRING {
		path = p.lit
		if !isValidImport(path) {
			p.error(pos, "invalid import path: "+path)
		}
		p.next()
	} else {
		p.expect(token.STRING) // use expect() error handling
	}
	p.expectSemi() // call before accessing p.linecomment

	// collect imports
	spec := &ast.ImportSpec{
		Doc:     doc,
		Name:    ident,
		Path:    &ast.BasicLit{ValuePos: pos, Kind: token.STRING, Value: path},
		Comment: p.lineComment,
	}
	p.imports = append(p.imports, spec)

	return spec
}
  2329  
// parseValueSpec parses a constant or variable spec (the keyword
// distinguishes the two). iota is the spec's index within its
// parenthesized group; for constants with iota > 0 and no explicit
// type, a missing value list is permitted (the previous spec's
// expressions are implicitly repeated per the Go spec).
func (p *parser) parseValueSpec(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec {
	if p.trace {
		defer un(trace(p, keyword.String()+"Spec"))
	}

	pos := p.pos
	idents := p.parseIdentList()
	typ := p.tryType() // optional type
	var values []ast.Expr
	// always permit optional initialization for more tolerant parsing
	if p.tok == token.ASSIGN {
		p.next()
		values = p.parseRhsList()
	}
	p.expectSemi() // call before accessing p.linecomment

	// Report specs that are syntactically incomplete for the keyword.
	switch keyword {
	case token.VAR:
		if typ == nil && values == nil {
			p.error(pos, "missing variable type or initialization")
		}
	case token.CONST:
		if values == nil && (iota == 0 || typ != nil) {
			p.error(pos, "missing constant value")
		}
	}

	// Go spec: The scope of a constant or variable identifier declared inside
	// a function begins at the end of the ConstSpec or VarSpec and ends at
	// the end of the innermost containing block.
	// (Global identifiers are resolved in a separate phase after parsing.)
	spec := &ast.ValueSpec{
		Doc:     doc,
		Names:   idents,
		Type:    typ,
		Values:  values,
		Comment: p.lineComment,
	}
	kind := ast.Con
	if keyword == token.VAR {
		kind = ast.Var
	}
	p.declare(spec, iota, p.topScope, kind, idents...)

	return spec
}
  2376  
// parseTypeSpec parses a type spec: either a type definition
// (type T X) or an alias declaration (type T = X); for an alias,
// spec.Assign records the position of the '='. The keyword and iota
// arguments of the parseSpecFunction signature are unused here.
func (p *parser) parseTypeSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
	if p.trace {
		defer un(trace(p, "TypeSpec"))
	}

	ident := p.parseIdent()

	// Go spec: The scope of a type identifier declared inside a function begins
	// at the identifier in the TypeSpec and ends at the end of the innermost
	// containing block.
	// (Global identifiers are resolved in a separate phase after parsing.)
	// Note that the name is declared before the type expression is
	// parsed, so it is already in scope while parsing the type.
	spec := &ast.TypeSpec{Doc: doc, Name: ident}
	p.declare(spec, nil, p.topScope, ast.Typ, ident)
	if p.tok == token.ASSIGN {
		// alias declaration: type T = X
		spec.Assign = p.pos
		p.next()
	}
	spec.Type = p.parseType()
	p.expectSemi() // call before accessing p.linecomment
	spec.Comment = p.lineComment

	return spec
}
  2400  
  2401  func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
  2402  	if p.trace {
  2403  		defer un(trace(p, "GenDecl("+keyword.String()+")"))
  2404  	}
  2405  
  2406  	doc := p.leadComment
  2407  	pos := p.expect(keyword)
  2408  	var lparen, rparen token.Pos
  2409  	var list []ast.Spec
  2410  	if p.tok == token.LPAREN {
  2411  		lparen = p.pos
  2412  		p.next()
  2413  		for iota := 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++ {
  2414  			list = append(list, f(p.leadComment, keyword, iota))
  2415  		}
  2416  		rparen = p.expect(token.RPAREN)
  2417  		p.expectSemi()
  2418  	} else {
  2419  		list = append(list, f(nil, keyword, 0))
  2420  	}
  2421  
  2422  	return &ast.GenDecl{
  2423  		Doc:    doc,
  2424  		TokPos: pos,
  2425  		Tok:    keyword,
  2426  		Lparen: lparen,
  2427  		Specs:  list,
  2428  		Rparen: rparen,
  2429  	}
  2430  }
  2431  
// parseFuncDecl parses a function or method declaration. The body is
// optional (a declaration without a body declares a function
// implemented externally). A stray semicolon/newline between the
// signature and an opening '{' is reported as an error, but the body
// is still parsed for robustness.
func (p *parser) parseFuncDecl() *ast.FuncDecl {
	if p.trace {
		defer un(trace(p, "FunctionDecl"))
	}

	doc := p.leadComment
	pos := p.expect(token.FUNC)
	scope := ast.NewScope(p.topScope) // function scope

	// A '(' before the name indicates a method receiver.
	var recv *ast.FieldList
	if p.tok == token.LPAREN {
		recv = p.parseParameters(scope, false)
	}

	ident := p.parseIdent()

	params, results := p.parseSignature(scope)

	var body *ast.BlockStmt
	if p.tok == token.LBRACE {
		body = p.parseBody(scope)
		p.expectSemi()
	} else if p.tok == token.SEMICOLON {
		p.next()
		if p.tok == token.LBRACE {
			// opening { of function declaration on next line
			p.error(p.pos, "unexpected semicolon or newline before {")
			body = p.parseBody(scope)
			p.expectSemi()
		}
	} else {
		p.expectSemi()
	}

	decl := &ast.FuncDecl{
		Doc:  doc,
		Recv: recv,
		Name: ident,
		Type: &ast.FuncType{
			Func:    pos,
			Params:  params,
			Results: results,
		},
		Body: body,
	}
	if recv == nil {
		// Go spec: The scope of an identifier denoting a constant, type,
		// variable, or function (but not method) declared at top level
		// (outside any function) is the package block.
		//
		// init() functions cannot be referred to and there may
		// be more than one - don't put them in the pkgScope
		if ident.Name != "init" {
			p.declare(decl, nil, p.pkgScope, ast.Fun, ident)
		}
	}

	return decl
}
  2491  
  2492  func (p *parser) parseDecl(sync map[token.Token]bool) ast.Decl {
  2493  	if p.trace {
  2494  		defer un(trace(p, "Declaration"))
  2495  	}
  2496  
  2497  	var f parseSpecFunction
  2498  	switch p.tok {
  2499  	case token.CONST, token.VAR:
  2500  		f = p.parseValueSpec
  2501  
  2502  	case token.TYPE:
  2503  		f = p.parseTypeSpec
  2504  
  2505  	case token.FUNC:
  2506  		return p.parseFuncDecl()
  2507  
  2508  	default:
  2509  		pos := p.pos
  2510  		p.errorExpected(pos, "declaration")
  2511  		p.advance(sync)
  2512  		return &ast.BadDecl{From: pos, To: p.pos}
  2513  	}
  2514  
  2515  	return p.parseGenDecl(p.tok, f)
  2516  }
  2517  
  2518  // ----------------------------------------------------------------------------
  2519  // Source files
  2520  
// parseFile parses a complete source file: the package clause,
// import declarations, and (depending on p.mode) the remaining
// top-level declarations. It then resolves file-global identifiers
// against the package scope. It returns nil if scanning or the
// package clause produced errors (the input is likely not Go).
func (p *parser) parseFile() *ast.File {
	if p.trace {
		defer un(trace(p, "File"))
	}

	// Don't bother parsing the rest if we had errors scanning the first token.
	// Likely not a Go source file at all.
	if p.errors.Len() != 0 {
		return nil
	}

	// package clause
	doc := p.leadComment
	pos := p.expect(token.PACKAGE)
	// Go spec: The package clause is not a declaration;
	// the package name does not appear in any scope.
	ident := p.parseIdent()
	if ident.Name == "_" && p.mode&DeclarationErrors != 0 {
		p.error(p.pos, "invalid package name _")
	}
	p.expectSemi()

	// Don't bother parsing the rest if we had errors parsing the package clause.
	// Likely not a Go source file at all.
	if p.errors.Len() != 0 {
		return nil
	}

	p.openScope()
	p.pkgScope = p.topScope
	var decls []ast.Decl
	if p.mode&PackageClauseOnly == 0 {
		// import decls
		for p.tok == token.IMPORT {
			decls = append(decls, p.parseGenDecl(token.IMPORT, p.parseImportSpec))
		}

		if p.mode&ImportsOnly == 0 {
			// rest of package body
			for p.tok != token.EOF {
				decls = append(decls, p.parseDecl(declStart))
			}
		}
	}
	p.closeScope()
	assert(p.topScope == nil, "unbalanced scopes")
	assert(p.labelScope == nil, "unbalanced label scopes")

	// resolve global identifiers within the same file;
	// identifiers that remain unresolved are compacted to
	// the front of p.unresolved (i counts them).
	i := 0
	for _, ident := range p.unresolved {
		// i <= index for current ident
		assert(ident.Obj == unresolved, "object already resolved")
		ident.Obj = p.pkgScope.Lookup(ident.Name) // also removes unresolved sentinel
		if ident.Obj == nil {
			p.unresolved[i] = ident
			i++
		}
	}

	return &ast.File{
		Doc:        doc,
		Package:    pos,
		Name:       ident,
		Decls:      decls,
		Scope:      p.pkgScope,
		Imports:    p.imports,
		Unresolved: p.unresolved[0:i],
		Comments:   p.comments,
	}
}
  2592  

View as plain text