...

Source file src/go/parser/parser.go
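
The package comment at the top of the listing notes that the parser is driven through the exported Parse* functions. As a minimal illustrative sketch of typical usage (not part of parser.go itself; the file name "demo.go" and the source literal below are placeholders), a caller might parse a source string and walk the resulting AST like this:

	package main

	import (
		"fmt"
		"go/ast"
		"go/parser"
		"go/token"
		"log"
	)

	func main() {
		// Placeholder source text; any syntactically valid Go file will do.
		src := "package demo\n\nfunc Hello() string { return \"hi\" }\n"

		fset := token.NewFileSet() // token positions are interpreted relative to this FileSet
		f, err := parser.ParseFile(fset, "demo.go", src, parser.ParseComments)
		if err != nil {
			log.Fatal(err)
		}

		// Print the name and position of each function declaration found.
		ast.Inspect(f, func(n ast.Node) bool {
			if fn, ok := n.(*ast.FuncDecl); ok {
				fmt.Printf("func %s at %s\n", fn.Name.Name, fset.Position(fn.Pos()))
			}
			return true
		})
	}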

     1	// Copyright 2009 The Go Authors. All rights reserved.
     2	// Use of this source code is governed by a BSD-style
     3	// license that can be found in the LICENSE file.
     4	
     5	// Package parser implements a parser for Go source files. Input may be
     6	// provided in a variety of forms (see the various Parse* functions); the
     7	// output is an abstract syntax tree (AST) representing the Go source. The
     8	// parser is invoked through one of the Parse* functions.
     9	//
    10	// The parser accepts a larger language than is syntactically permitted by
    11	// the Go spec, for simplicity, and for improved robustness in the presence
    12	// of syntax errors. For instance, in method declarations, the receiver is
    13	// treated like an ordinary parameter list and thus may contain multiple
    14	// entries where the spec permits exactly one. Consequently, the corresponding
    15	// field in the AST (ast.FuncDecl.Recv) is not restricted to one entry.
    16	//
    17	package parser
    18	
    19	import (
    20		"fmt"
    21		"go/ast"
    22		"go/scanner"
    23		"go/token"
    24		"strconv"
    25		"strings"
    26		"unicode"
    27	)
    28	
    29	// The parser structure holds the parser's internal state.
    30	type parser struct {
    31		file    *token.File
    32		errors  scanner.ErrorList
    33		scanner scanner.Scanner
    34	
    35		// Tracing/debugging
    36		mode   Mode // parsing mode
    37		trace  bool // == (mode & Trace != 0)
    38		indent int  // indentation used for tracing output
    39	
    40		// Comments
    41		comments    []*ast.CommentGroup
    42		leadComment *ast.CommentGroup // last lead comment
    43		lineComment *ast.CommentGroup // last line comment
    44	
    45		// Next token
    46		pos token.Pos   // token position
    47		tok token.Token // one token look-ahead
    48		lit string      // token literal
    49	
    50		// Error recovery
    51		// (used to limit the number of calls to syncXXX functions
    52		// w/o making scanning progress - avoids potential endless
    53		// loops across multiple parser functions during error recovery)
    54		syncPos token.Pos // last synchronization position
    55		syncCnt int       // number of calls to syncXXX without progress
    56	
    57		// Non-syntactic parser control
    58		exprLev int  // < 0: in control clause, >= 0: in expression
    59		inRhs   bool // if set, the parser is parsing a rhs expression
    60	
    61		// Ordinary identifier scopes
    62		pkgScope   *ast.Scope        // pkgScope.Outer == nil
    63		topScope   *ast.Scope        // top-most scope; may be pkgScope
    64		unresolved []*ast.Ident      // unresolved identifiers
    65		imports    []*ast.ImportSpec // list of imports
    66	
    67		// Label scopes
    68		// (maintained by open/close LabelScope)
    69		labelScope  *ast.Scope     // label scope for current function
    70		targetStack [][]*ast.Ident // stack of unresolved labels
    71	}
    72	
    73	func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode Mode) {
    74		p.file = fset.AddFile(filename, -1, len(src))
    75		var m scanner.Mode
    76		if mode&ParseComments != 0 {
    77			m = scanner.ScanComments
    78		}
    79		eh := func(pos token.Position, msg string) { p.errors.Add(pos, msg) }
    80		p.scanner.Init(p.file, src, eh, m)
    81	
    82		p.mode = mode
    83		p.trace = mode&Trace != 0 // for convenience (p.trace is used frequently)
    84	
    85		p.next()
    86	}
    87	
    88	// ----------------------------------------------------------------------------
    89	// Scoping support
    90	
    91	func (p *parser) openScope() {
    92		p.topScope = ast.NewScope(p.topScope)
    93	}
    94	
    95	func (p *parser) closeScope() {
    96		p.topScope = p.topScope.Outer
    97	}
    98	
    99	func (p *parser) openLabelScope() {
   100		p.labelScope = ast.NewScope(p.labelScope)
   101		p.targetStack = append(p.targetStack, nil)
   102	}
   103	
   104	func (p *parser) closeLabelScope() {
   105		// resolve labels
   106		n := len(p.targetStack) - 1
   107		scope := p.labelScope
   108		for _, ident := range p.targetStack[n] {
   109			ident.Obj = scope.Lookup(ident.Name)
   110			if ident.Obj == nil && p.mode&DeclarationErrors != 0 {
   111				p.error(ident.Pos(), fmt.Sprintf("label %s undefined", ident.Name))
   112			}
   113		}
   114		// pop label scope
   115		p.targetStack = p.targetStack[0:n]
   116		p.labelScope = p.labelScope.Outer
   117	}
   118	
   119	func (p *parser) declare(decl, data interface{}, scope *ast.Scope, kind ast.ObjKind, idents ...*ast.Ident) {
   120		for _, ident := range idents {
   121			assert(ident.Obj == nil, "identifier already declared or resolved")
   122			obj := ast.NewObj(kind, ident.Name)
   123			// remember the corresponding declaration for redeclaration
   124			// errors and global variable resolution/typechecking phase
   125			obj.Decl = decl
   126			obj.Data = data
   127			ident.Obj = obj
   128			if ident.Name != "_" {
   129				if alt := scope.Insert(obj); alt != nil && p.mode&DeclarationErrors != 0 {
   130					prevDecl := ""
   131					if pos := alt.Pos(); pos.IsValid() {
   132						prevDecl = fmt.Sprintf("\n\tprevious declaration at %s", p.file.Position(pos))
   133					}
   134					p.error(ident.Pos(), fmt.Sprintf("%s redeclared in this block%s", ident.Name, prevDecl))
   135				}
   136			}
   137		}
   138	}
   139	
   140	func (p *parser) shortVarDecl(decl *ast.AssignStmt, list []ast.Expr) {
   141		// Go spec: A short variable declaration may redeclare variables
   142		// provided they were originally declared in the same block with
   143		// the same type, and at least one of the non-blank variables is new.
   144		n := 0 // number of new variables
   145		for _, x := range list {
   146			if ident, isIdent := x.(*ast.Ident); isIdent {
   147				assert(ident.Obj == nil, "identifier already declared or resolved")
   148				obj := ast.NewObj(ast.Var, ident.Name)
   149				// remember corresponding assignment for other tools
   150				obj.Decl = decl
   151				ident.Obj = obj
   152				if ident.Name != "_" {
   153					if alt := p.topScope.Insert(obj); alt != nil {
   154						ident.Obj = alt // redeclaration
   155					} else {
   156						n++ // new declaration
   157					}
   158				}
   159			} else {
   160				p.errorExpected(x.Pos(), "identifier on left side of :=")
   161			}
   162		}
   163		if n == 0 && p.mode&DeclarationErrors != 0 {
   164			p.error(list[0].Pos(), "no new variables on left side of :=")
   165		}
   166	}
   167	
   168	// The unresolved object is a sentinel to mark identifiers that have been added
   169	// to the list of unresolved identifiers. The sentinel is only used for verifying
   170	// internal consistency.
   171	var unresolved = new(ast.Object)
   172	
   173	// If x is an identifier, tryResolve attempts to resolve x by looking up
   174	// the object it denotes. If no object is found and collectUnresolved is
   175	// set, x is marked as unresolved and collected in the list of unresolved
   176	// identifiers.
   177	//
   178	func (p *parser) tryResolve(x ast.Expr, collectUnresolved bool) {
   179		// nothing to do if x is not an identifier or the blank identifier
   180		ident, _ := x.(*ast.Ident)
   181		if ident == nil {
   182			return
   183		}
   184		assert(ident.Obj == nil, "identifier already declared or resolved")
   185		if ident.Name == "_" {
   186			return
   187		}
   188		// try to resolve the identifier
   189		for s := p.topScope; s != nil; s = s.Outer {
   190			if obj := s.Lookup(ident.Name); obj != nil {
   191				ident.Obj = obj
   192				return
   193			}
   194		}
   195		// all local scopes are known, so any unresolved identifier
   196		// must be found either in the file scope, package scope
   197		// (perhaps in another file), or universe scope --- collect
   198		// them so that they can be resolved later
   199		if collectUnresolved {
   200			ident.Obj = unresolved
   201			p.unresolved = append(p.unresolved, ident)
   202		}
   203	}
   204	
   205	func (p *parser) resolve(x ast.Expr) {
   206		p.tryResolve(x, true)
   207	}
   208	
   209	// ----------------------------------------------------------------------------
   210	// Parsing support
   211	
   212	func (p *parser) printTrace(a ...interface{}) {
   213		const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
   214		const n = len(dots)
   215		pos := p.file.Position(p.pos)
   216		fmt.Printf("%5d:%3d: ", pos.Line, pos.Column)
   217		i := 2 * p.indent
   218		for i > n {
   219			fmt.Print(dots)
   220			i -= n
   221		}
   222		// i <= n
   223		fmt.Print(dots[0:i])
   224		fmt.Println(a...)
   225	}
   226	
   227	func trace(p *parser, msg string) *parser {
   228		p.printTrace(msg, "(")
   229		p.indent++
   230		return p
   231	}
   232	
   233	// Usage pattern: defer un(trace(p, "..."))
   234	func un(p *parser) {
   235		p.indent--
   236		p.printTrace(")")
   237	}
   238	
   239	// Advance to the next token.
   240	func (p *parser) next0() {
   241		// Because of one-token look-ahead, print the previous token
   242		// when tracing as it provides a more readable output. The
   243		// very first token (!p.pos.IsValid()) is not initialized
   244		// (it is token.ILLEGAL), so don't print it.
   245		if p.trace && p.pos.IsValid() {
   246			s := p.tok.String()
   247			switch {
   248			case p.tok.IsLiteral():
   249				p.printTrace(s, p.lit)
   250			case p.tok.IsOperator(), p.tok.IsKeyword():
   251				p.printTrace("\"" + s + "\"")
   252			default:
   253				p.printTrace(s)
   254			}
   255		}
   256	
   257		p.pos, p.tok, p.lit = p.scanner.Scan()
   258	}
   259	
   260	// Consume a comment and return it and the line on which it ends.
   261	func (p *parser) consumeComment() (comment *ast.Comment, endline int) {
   262		// /*-style comments may end on a different line than where they start.
   263		// Scan the comment for '\n' chars and adjust endline accordingly.
   264		endline = p.file.Line(p.pos)
   265		if p.lit[1] == '*' {
   266			// don't use range here - no need to decode Unicode code points
   267			for i := 0; i < len(p.lit); i++ {
   268				if p.lit[i] == '\n' {
   269					endline++
   270				}
   271			}
   272		}
   273	
   274		comment = &ast.Comment{Slash: p.pos, Text: p.lit}
   275		p.next0()
   276	
   277		return
   278	}
   279	
   280	// Consume a group of adjacent comments, add it to the parser's
   281	// comments list, and return it together with the line at which
   282	// the last comment in the group ends. A non-comment token or n
   283	// empty lines terminate a comment group.
   284	//
   285	func (p *parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline int) {
   286		var list []*ast.Comment
   287		endline = p.file.Line(p.pos)
   288		for p.tok == token.COMMENT && p.file.Line(p.pos) <= endline+n {
   289			var comment *ast.Comment
   290			comment, endline = p.consumeComment()
   291			list = append(list, comment)
   292		}
   293	
   294		// add comment group to the comments list
   295		comments = &ast.CommentGroup{List: list}
   296		p.comments = append(p.comments, comments)
   297	
   298		return
   299	}
   300	
   301	// Advance to the next non-comment token. In the process, collect
   302	// any comment groups encountered, and remember the last lead and
   303	// line comments.
   304	//
   305	// A lead comment is a comment group that starts and ends in a
   306	// line without any other tokens and that is followed by a non-comment
   307	// token on the line immediately after the comment group.
   308	//
   309	// A line comment is a comment group that follows a non-comment
   310	// token on the same line, and that has no tokens after it on the line
   311	// where it ends.
   312	//
   313	// Lead and line comments may be considered documentation that is
   314	// stored in the AST.
   315	//
   316	func (p *parser) next() {
   317		p.leadComment = nil
   318		p.lineComment = nil
   319		prev := p.pos
   320		p.next0()
   321	
   322		if p.tok == token.COMMENT {
   323			var comment *ast.CommentGroup
   324			var endline int
   325	
   326			if p.file.Line(p.pos) == p.file.Line(prev) {
   327				// The comment is on the same line as the previous token; it
   328				// cannot be a lead comment but may be a line comment.
   329				comment, endline = p.consumeCommentGroup(0)
   330				if p.file.Line(p.pos) != endline {
   331					// The next token is on a different line, thus
   332					// the last comment group is a line comment.
   333					p.lineComment = comment
   334				}
   335			}
   336	
   337			// consume successor comments, if any
   338			endline = -1
   339			for p.tok == token.COMMENT {
   340				comment, endline = p.consumeCommentGroup(1)
   341			}
   342	
   343			if endline+1 == p.file.Line(p.pos) {
   344				// The next token is following on the line immediately after the
   345				// comment group, thus the last comment group is a lead comment.
   346				p.leadComment = comment
   347			}
   348		}
   349	}
   350	
   351	// A bailout panic is raised to indicate early termination.
   352	type bailout struct{}
   353	
   354	func (p *parser) error(pos token.Pos, msg string) {
   355		epos := p.file.Position(pos)
   356	
   357		// If AllErrors is not set, discard errors reported on the same line
   358		// as the last recorded error and stop parsing if there are more than
   359		// 10 errors.
   360		if p.mode&AllErrors == 0 {
   361			n := len(p.errors)
   362			if n > 0 && p.errors[n-1].Pos.Line == epos.Line {
   363				return // discard - likely a spurious error
   364			}
   365			if n > 10 {
   366				panic(bailout{})
   367			}
   368		}
   369	
   370		p.errors.Add(epos, msg)
   371	}
   372	
   373	func (p *parser) errorExpected(pos token.Pos, msg string) {
   374		msg = "expected " + msg
   375		if pos == p.pos {
   376			// the error happened at the current position;
   377			// make the error message more specific
   378			if p.tok == token.SEMICOLON && p.lit == "\n" {
   379				msg += ", found newline"
   380			} else {
   381				msg += ", found '" + p.tok.String() + "'"
   382				if p.tok.IsLiteral() {
   383					msg += " " + p.lit
   384				}
   385			}
   386		}
   387		p.error(pos, msg)
   388	}
   389	
   390	func (p *parser) expect(tok token.Token) token.Pos {
   391		pos := p.pos
   392		if p.tok != tok {
   393			p.errorExpected(pos, "'"+tok.String()+"'")
   394		}
   395		p.next() // make progress
   396		return pos
   397	}
   398	
   399	// expectClosing is like expect but provides a better error message
   400	// for the common case of a missing comma before a newline.
   401	//
   402	func (p *parser) expectClosing(tok token.Token, context string) token.Pos {
   403		if p.tok != tok && p.tok == token.SEMICOLON && p.lit == "\n" {
   404			p.error(p.pos, "missing ',' before newline in "+context)
   405			p.next()
   406		}
   407		return p.expect(tok)
   408	}
   409	
   410	func (p *parser) expectSemi() {
   411		// semicolon is optional before a closing ')' or '}'
   412		if p.tok != token.RPAREN && p.tok != token.RBRACE {
   413			if p.tok == token.SEMICOLON {
   414				p.next()
   415			} else {
   416				p.errorExpected(p.pos, "';'")
   417				syncStmt(p)
   418			}
   419		}
   420	}
   421	
   422	func (p *parser) atComma(context string, follow token.Token) bool {
   423		if p.tok == token.COMMA {
   424			return true
   425		}
   426		if p.tok != follow {
   427			msg := "missing ','"
   428			if p.tok == token.SEMICOLON && p.lit == "\n" {
   429				msg += " before newline"
   430			}
   431			p.error(p.pos, msg+" in "+context)
   432			return true // "insert" comma and continue
   433		}
   434		return false
   435	}
   436	
   437	func assert(cond bool, msg string) {
   438		if !cond {
   439			panic("go/parser internal error: " + msg)
   440		}
   441	}
   442	
   443	// syncStmt advances to the next statement.
   444	// Used for synchronization after an error.
   445	//
   446	func syncStmt(p *parser) {
   447		for {
   448			switch p.tok {
   449			case token.BREAK, token.CONST, token.CONTINUE, token.DEFER,
   450				token.FALLTHROUGH, token.FOR, token.GO, token.GOTO,
   451				token.IF, token.RETURN, token.SELECT, token.SWITCH,
   452				token.TYPE, token.VAR:
   453				// Return only if parser made some progress since last
   454				// sync or if it has not reached 10 sync calls without
   455				// progress. Otherwise consume at least one token to
   456				// avoid an endless parser loop (it is possible that
   457				// both parseOperand and parseStmt call syncStmt and
   458				// correctly do not advance, thus the need for the
   459				// invocation limit p.syncCnt).
   460				if p.pos == p.syncPos && p.syncCnt < 10 {
   461					p.syncCnt++
   462					return
   463				}
   464				if p.pos > p.syncPos {
   465					p.syncPos = p.pos
   466					p.syncCnt = 0
   467					return
   468				}
   469				// Reaching here indicates a parser bug, likely an
   470				// incorrect token list in this function, but it only
   471				// leads to skipping of possibly correct code if a
   472				// previous error is present, and thus is preferred
   473				// over a non-terminating parse.
   474			case token.EOF:
   475				return
   476			}
   477			p.next()
   478		}
   479	}
   480	
   481	// syncDecl advances to the next declaration.
   482	// Used for synchronization after an error.
   483	//
   484	func syncDecl(p *parser) {
   485		for {
   486			switch p.tok {
   487			case token.CONST, token.TYPE, token.VAR:
   488				// see comments in syncStmt
   489				if p.pos == p.syncPos && p.syncCnt < 10 {
   490					p.syncCnt++
   491					return
   492				}
   493				if p.pos > p.syncPos {
   494					p.syncPos = p.pos
   495					p.syncCnt = 0
   496					return
   497				}
   498			case token.EOF:
   499				return
   500			}
   501			p.next()
   502		}
   503	}
   504	
   505	// safePos returns a valid file position for a given position: If pos
   506	// is valid to begin with, safePos returns pos. If pos is out-of-range,
   507	// safePos returns the EOF position.
   508	//
   509	// This is a hack to work around "artificial" end positions in the AST which
   510	// are computed by adding 1 to (presumably valid) token positions. If the
   511	// token positions are invalid due to parse errors, the resulting end position
   512	// may be past the file's EOF position, which would lead to panics if used
   513	// later on.
   514	//
   515	func (p *parser) safePos(pos token.Pos) (res token.Pos) {
   516		defer func() {
   517			if recover() != nil {
   518				res = token.Pos(p.file.Base() + p.file.Size()) // EOF position
   519			}
   520		}()
   521		_ = p.file.Offset(pos) // trigger a panic if position is out-of-range
   522		return pos
   523	}
   524	
   525	// ----------------------------------------------------------------------------
   526	// Identifiers
   527	
   528	func (p *parser) parseIdent() *ast.Ident {
   529		pos := p.pos
   530		name := "_"
   531		if p.tok == token.IDENT {
   532			name = p.lit
   533			p.next()
   534		} else {
   535			p.expect(token.IDENT) // use expect() error handling
   536		}
   537		return &ast.Ident{NamePos: pos, Name: name}
   538	}
   539	
   540	func (p *parser) parseIdentList() (list []*ast.Ident) {
   541		if p.trace {
   542			defer un(trace(p, "IdentList"))
   543		}
   544	
   545		list = append(list, p.parseIdent())
   546		for p.tok == token.COMMA {
   547			p.next()
   548			list = append(list, p.parseIdent())
   549		}
   550	
   551		return
   552	}
   553	
   554	// ----------------------------------------------------------------------------
   555	// Common productions
   556	
   557	// If lhs is set, result list elements which are identifiers are not resolved.
   558	func (p *parser) parseExprList(lhs bool) (list []ast.Expr) {
   559		if p.trace {
   560			defer un(trace(p, "ExpressionList"))
   561		}
   562	
   563		list = append(list, p.checkExpr(p.parseExpr(lhs)))
   564		for p.tok == token.COMMA {
   565			p.next()
   566			list = append(list, p.checkExpr(p.parseExpr(lhs)))
   567		}
   568	
   569		return
   570	}
   571	
   572	func (p *parser) parseLhsList() []ast.Expr {
   573		old := p.inRhs
   574		p.inRhs = false
   575		list := p.parseExprList(true)
   576		switch p.tok {
   577		case token.DEFINE:
   578			// lhs of a short variable declaration
   579			// but doesn't enter scope until later:
   580			// caller must call p.shortVarDecl(p.makeIdentList(list))
   581		// at the appropriate time.
   582		case token.COLON:
   583			// lhs of a label declaration or a communication clause of a select
   584			// statement (parseLhsList is not called when parsing the case clause
   585			// of a switch statement):
   586			// - labels are declared by the caller of parseLhsList
   587			// - for communication clauses, if there is a stand-alone identifier
   588			//   followed by a colon, we have a syntax error; there is no need
   589			//   to resolve the identifier in that case
   590		default:
   591			// identifiers must be declared elsewhere
   592			for _, x := range list {
   593				p.resolve(x)
   594			}
   595		}
   596		p.inRhs = old
   597		return list
   598	}
   599	
   600	func (p *parser) parseRhsList() []ast.Expr {
   601		old := p.inRhs
   602		p.inRhs = true
   603		list := p.parseExprList(false)
   604		p.inRhs = old
   605		return list
   606	}
   607	
   608	// ----------------------------------------------------------------------------
   609	// Types
   610	
   611	func (p *parser) parseType() ast.Expr {
   612		if p.trace {
   613			defer un(trace(p, "Type"))
   614		}
   615	
   616		typ := p.tryType()
   617	
   618		if typ == nil {
   619			pos := p.pos
   620			p.errorExpected(pos, "type")
   621			p.next() // make progress
   622			return &ast.BadExpr{From: pos, To: p.pos}
   623		}
   624	
   625		return typ
   626	}
   627	
   628	// If the result is an identifier, it is not resolved.
   629	func (p *parser) parseTypeName() ast.Expr {
   630		if p.trace {
   631			defer un(trace(p, "TypeName"))
   632		}
   633	
   634		ident := p.parseIdent()
   635		// don't resolve ident yet - it may be a parameter or field name
   636	
   637		if p.tok == token.PERIOD {
   638			// ident is a package name
   639			p.next()
   640			p.resolve(ident)
   641			sel := p.parseIdent()
   642			return &ast.SelectorExpr{X: ident, Sel: sel}
   643		}
   644	
   645		return ident
   646	}
   647	
   648	func (p *parser) parseArrayType() ast.Expr {
   649		if p.trace {
   650			defer un(trace(p, "ArrayType"))
   651		}
   652	
   653		lbrack := p.expect(token.LBRACK)
   654		p.exprLev++
   655		var len ast.Expr
   656		// always permit ellipsis for more fault-tolerant parsing
   657		if p.tok == token.ELLIPSIS {
   658			len = &ast.Ellipsis{Ellipsis: p.pos}
   659			p.next()
   660		} else if p.tok != token.RBRACK {
   661			len = p.parseRhs()
   662		}
   663		p.exprLev--
   664		p.expect(token.RBRACK)
   665		elt := p.parseType()
   666	
   667		return &ast.ArrayType{Lbrack: lbrack, Len: len, Elt: elt}
   668	}
   669	
   670	func (p *parser) makeIdentList(list []ast.Expr) []*ast.Ident {
   671		idents := make([]*ast.Ident, len(list))
   672		for i, x := range list {
   673			ident, isIdent := x.(*ast.Ident)
   674			if !isIdent {
   675				if _, isBad := x.(*ast.BadExpr); !isBad {
   676					// only report error if it's a new one
   677					p.errorExpected(x.Pos(), "identifier")
   678				}
   679				ident = &ast.Ident{NamePos: x.Pos(), Name: "_"}
   680			}
   681			idents[i] = ident
   682		}
   683		return idents
   684	}
   685	
   686	func (p *parser) parseFieldDecl(scope *ast.Scope) *ast.Field {
   687		if p.trace {
   688			defer un(trace(p, "FieldDecl"))
   689		}
   690	
   691		doc := p.leadComment
   692	
   693		// FieldDecl
   694		list, typ := p.parseVarList(false)
   695	
   696		// Tag
   697		var tag *ast.BasicLit
   698		if p.tok == token.STRING {
   699			tag = &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
   700			p.next()
   701		}
   702	
   703		// analyze case
   704		var idents []*ast.Ident
   705		if typ != nil {
   706			// IdentifierList Type
   707			idents = p.makeIdentList(list)
   708		} else {
   709			// ["*"] TypeName (AnonymousField)
   710			typ = list[0] // we always have at least one element
   711			if n := len(list); n > 1 || !isTypeName(deref(typ)) {
   712				pos := typ.Pos()
   713				p.errorExpected(pos, "anonymous field")
   714				typ = &ast.BadExpr{From: pos, To: p.safePos(list[n-1].End())}
   715			}
   716		}
   717	
   718		p.expectSemi() // call before accessing p.linecomment
   719	
   720		field := &ast.Field{Doc: doc, Names: idents, Type: typ, Tag: tag, Comment: p.lineComment}
   721		p.declare(field, nil, scope, ast.Var, idents...)
   722		p.resolve(typ)
   723	
   724		return field
   725	}
   726	
   727	func (p *parser) parseStructType() *ast.StructType {
   728		if p.trace {
   729			defer un(trace(p, "StructType"))
   730		}
   731	
   732		pos := p.expect(token.STRUCT)
   733		lbrace := p.expect(token.LBRACE)
   734		scope := ast.NewScope(nil) // struct scope
   735		var list []*ast.Field
   736		for p.tok == token.IDENT || p.tok == token.MUL || p.tok == token.LPAREN {
   737			// a field declaration cannot start with a '(' but we accept
   738			// it here for more robust parsing and better error messages
   739			// (parseFieldDecl will check and complain if necessary)
   740			list = append(list, p.parseFieldDecl(scope))
   741		}
   742		rbrace := p.expect(token.RBRACE)
   743	
   744		return &ast.StructType{
   745			Struct: pos,
   746			Fields: &ast.FieldList{
   747				Opening: lbrace,
   748				List:    list,
   749				Closing: rbrace,
   750			},
   751		}
   752	}
   753	
   754	func (p *parser) parsePointerType() *ast.StarExpr {
   755		if p.trace {
   756			defer un(trace(p, "PointerType"))
   757		}
   758	
   759		star := p.expect(token.MUL)
   760		base := p.parseType()
   761	
   762		return &ast.StarExpr{Star: star, X: base}
   763	}
   764	
   765	// If the result is an identifier, it is not resolved.
   766	func (p *parser) tryVarType(isParam bool) ast.Expr {
   767		if isParam && p.tok == token.ELLIPSIS {
   768			pos := p.pos
   769			p.next()
   770			typ := p.tryIdentOrType() // don't use parseType so we can provide better error message
   771			if typ != nil {
   772				p.resolve(typ)
   773			} else {
   774				p.error(pos, "'...' parameter is missing type")
   775				typ = &ast.BadExpr{From: pos, To: p.pos}
   776			}
   777			return &ast.Ellipsis{Ellipsis: pos, Elt: typ}
   778		}
   779		return p.tryIdentOrType()
   780	}
   781	
   782	// If the result is an identifier, it is not resolved.
   783	func (p *parser) parseVarType(isParam bool) ast.Expr {
   784		typ := p.tryVarType(isParam)
   785		if typ == nil {
   786			pos := p.pos
   787			p.errorExpected(pos, "type")
   788			p.next() // make progress
   789			typ = &ast.BadExpr{From: pos, To: p.pos}
   790		}
   791		return typ
   792	}
   793	
   794	// If any of the results are identifiers, they are not resolved.
   795	func (p *parser) parseVarList(isParam bool) (list []ast.Expr, typ ast.Expr) {
   796		if p.trace {
   797			defer un(trace(p, "VarList"))
   798		}
   799	
   800		// a list of identifiers looks like a list of type names
   801		//
   802		// parse/tryVarType accepts any type (including parenthesized
   803		// ones) even though the syntax does not permit them here: we
   804		// accept them all for more robust parsing and complain later
   805		for typ := p.parseVarType(isParam); typ != nil; {
   806			list = append(list, typ)
   807			if p.tok != token.COMMA {
   808				break
   809			}
   810			p.next()
   811			typ = p.tryVarType(isParam) // maybe nil as in: func f(int,) {}
   812		}
   813	
   814		// if we had a list of identifiers, it must be followed by a type
   815		typ = p.tryVarType(isParam)
   816	
   817		return
   818	}
   819	
   820	func (p *parser) parseParameterList(scope *ast.Scope, ellipsisOk bool) (params []*ast.Field) {
   821		if p.trace {
   822			defer un(trace(p, "ParameterList"))
   823		}
   824	
   825		// ParameterDecl
   826		list, typ := p.parseVarList(ellipsisOk)
   827	
   828		// analyze case
   829		if typ != nil {
   830			// IdentifierList Type
   831			idents := p.makeIdentList(list)
   832			field := &ast.Field{Names: idents, Type: typ}
   833			params = append(params, field)
   834			// Go spec: The scope of an identifier denoting a function
   835			// parameter or result variable is the function body.
   836			p.declare(field, nil, scope, ast.Var, idents...)
   837			p.resolve(typ)
   838			if !p.atComma("parameter list", token.RPAREN) {
   839				return
   840			}
   841			p.next()
   842			for p.tok != token.RPAREN && p.tok != token.EOF {
   843				idents := p.parseIdentList()
   844				typ := p.parseVarType(ellipsisOk)
   845				field := &ast.Field{Names: idents, Type: typ}
   846				params = append(params, field)
   847				// Go spec: The scope of an identifier denoting a function
   848				// parameter or result variable is the function body.
   849				p.declare(field, nil, scope, ast.Var, idents...)
   850				p.resolve(typ)
   851				if !p.atComma("parameter list", token.RPAREN) {
   852					break
   853				}
   854				p.next()
   855			}
   856			return
   857		}
   858	
   859		// Type { "," Type } (anonymous parameters)
   860		params = make([]*ast.Field, len(list))
   861		for i, typ := range list {
   862			p.resolve(typ)
   863			params[i] = &ast.Field{Type: typ}
   864		}
   865		return
   866	}
   867	
   868	func (p *parser) parseParameters(scope *ast.Scope, ellipsisOk bool) *ast.FieldList {
   869		if p.trace {
   870			defer un(trace(p, "Parameters"))
   871		}
   872	
   873		var params []*ast.Field
   874		lparen := p.expect(token.LPAREN)
   875		if p.tok != token.RPAREN {
   876			params = p.parseParameterList(scope, ellipsisOk)
   877		}
   878		rparen := p.expect(token.RPAREN)
   879	
   880		return &ast.FieldList{Opening: lparen, List: params, Closing: rparen}
   881	}
   882	
   883	func (p *parser) parseResult(scope *ast.Scope) *ast.FieldList {
   884		if p.trace {
   885			defer un(trace(p, "Result"))
   886		}
   887	
   888		if p.tok == token.LPAREN {
   889			return p.parseParameters(scope, false)
   890		}
   891	
   892		typ := p.tryType()
   893		if typ != nil {
   894			list := make([]*ast.Field, 1)
   895			list[0] = &ast.Field{Type: typ}
   896			return &ast.FieldList{List: list}
   897		}
   898	
   899		return nil
   900	}
   901	
   902	func (p *parser) parseSignature(scope *ast.Scope) (params, results *ast.FieldList) {
   903		if p.trace {
   904			defer un(trace(p, "Signature"))
   905		}
   906	
   907		params = p.parseParameters(scope, true)
   908		results = p.parseResult(scope)
   909	
   910		return
   911	}
   912	
   913	func (p *parser) parseFuncType() (*ast.FuncType, *ast.Scope) {
   914		if p.trace {
   915			defer un(trace(p, "FuncType"))
   916		}
   917	
   918		pos := p.expect(token.FUNC)
   919		scope := ast.NewScope(p.topScope) // function scope
   920		params, results := p.parseSignature(scope)
   921	
   922		return &ast.FuncType{Func: pos, Params: params, Results: results}, scope
   923	}
   924	
   925	func (p *parser) parseMethodSpec(scope *ast.Scope) *ast.Field {
   926		if p.trace {
   927			defer un(trace(p, "MethodSpec"))
   928		}
   929	
   930		doc := p.leadComment
   931		var idents []*ast.Ident
   932		var typ ast.Expr
   933		x := p.parseTypeName()
   934		if ident, isIdent := x.(*ast.Ident); isIdent && p.tok == token.LPAREN {
   935			// method
   936			idents = []*ast.Ident{ident}
   937			scope := ast.NewScope(nil) // method scope
   938			params, results := p.parseSignature(scope)
   939			typ = &ast.FuncType{Func: token.NoPos, Params: params, Results: results}
   940		} else {
   941			// embedded interface
   942			typ = x
   943			p.resolve(typ)
   944		}
   945		p.expectSemi() // call before accessing p.linecomment
   946	
   947		spec := &ast.Field{Doc: doc, Names: idents, Type: typ, Comment: p.lineComment}
   948		p.declare(spec, nil, scope, ast.Fun, idents...)
   949	
   950		return spec
   951	}
   952	
   953	func (p *parser) parseInterfaceType() *ast.InterfaceType {
   954		if p.trace {
   955			defer un(trace(p, "InterfaceType"))
   956		}
   957	
   958		pos := p.expect(token.INTERFACE)
   959		lbrace := p.expect(token.LBRACE)
   960		scope := ast.NewScope(nil) // interface scope
   961		var list []*ast.Field
   962		for p.tok == token.IDENT {
   963			list = append(list, p.parseMethodSpec(scope))
   964		}
   965		rbrace := p.expect(token.RBRACE)
   966	
   967		return &ast.InterfaceType{
   968			Interface: pos,
   969			Methods: &ast.FieldList{
   970				Opening: lbrace,
   971				List:    list,
   972				Closing: rbrace,
   973			},
   974		}
   975	}
   976	
   977	func (p *parser) parseMapType() *ast.MapType {
   978		if p.trace {
   979			defer un(trace(p, "MapType"))
   980		}
   981	
   982		pos := p.expect(token.MAP)
   983		p.expect(token.LBRACK)
   984		key := p.parseType()
   985		p.expect(token.RBRACK)
   986		value := p.parseType()
   987	
   988		return &ast.MapType{Map: pos, Key: key, Value: value}
   989	}
   990	
   991	func (p *parser) parseChanType() *ast.ChanType {
   992		if p.trace {
   993			defer un(trace(p, "ChanType"))
   994		}
   995	
   996		pos := p.pos
   997		dir := ast.SEND | ast.RECV
   998		var arrow token.Pos
   999		if p.tok == token.CHAN {
  1000			p.next()
  1001			if p.tok == token.ARROW {
  1002				arrow = p.pos
  1003				p.next()
  1004				dir = ast.SEND
  1005			}
  1006		} else {
  1007			arrow = p.expect(token.ARROW)
  1008			p.expect(token.CHAN)
  1009			dir = ast.RECV
  1010		}
  1011		value := p.parseType()
  1012	
  1013		return &ast.ChanType{Begin: pos, Arrow: arrow, Dir: dir, Value: value}
  1014	}
  1015	
  1016	// If the result is an identifier, it is not resolved.
  1017	func (p *parser) tryIdentOrType() ast.Expr {
  1018		switch p.tok {
  1019		case token.IDENT:
  1020			return p.parseTypeName()
  1021		case token.LBRACK:
  1022			return p.parseArrayType()
  1023		case token.STRUCT:
  1024			return p.parseStructType()
  1025		case token.MUL:
  1026			return p.parsePointerType()
  1027		case token.FUNC:
  1028			typ, _ := p.parseFuncType()
  1029			return typ
  1030		case token.INTERFACE:
  1031			return p.parseInterfaceType()
  1032		case token.MAP:
  1033			return p.parseMapType()
  1034		case token.CHAN, token.ARROW:
  1035			return p.parseChanType()
  1036		case token.LPAREN:
  1037			lparen := p.pos
  1038			p.next()
  1039			typ := p.parseType()
  1040			rparen := p.expect(token.RPAREN)
  1041			return &ast.ParenExpr{Lparen: lparen, X: typ, Rparen: rparen}
  1042		}
  1043	
  1044		// no type found
  1045		return nil
  1046	}
  1047	
  1048	func (p *parser) tryType() ast.Expr {
  1049		typ := p.tryIdentOrType()
  1050		if typ != nil {
  1051			p.resolve(typ)
  1052		}
  1053		return typ
  1054	}
  1055	
  1056	// ----------------------------------------------------------------------------
  1057	// Blocks
  1058	
  1059	func (p *parser) parseStmtList() (list []ast.Stmt) {
  1060		if p.trace {
  1061			defer un(trace(p, "StatementList"))
  1062		}
  1063	
  1064		for p.tok != token.CASE && p.tok != token.DEFAULT && p.tok != token.RBRACE && p.tok != token.EOF {
  1065			list = append(list, p.parseStmt())
  1066		}
  1067	
  1068		return
  1069	}
  1070	
  1071	func (p *parser) parseBody(scope *ast.Scope) *ast.BlockStmt {
  1072		if p.trace {
  1073			defer un(trace(p, "Body"))
  1074		}
  1075	
  1076		lbrace := p.expect(token.LBRACE)
  1077		p.topScope = scope // open function scope
  1078		p.openLabelScope()
  1079		list := p.parseStmtList()
  1080		p.closeLabelScope()
  1081		p.closeScope()
  1082		rbrace := p.expect(token.RBRACE)
  1083	
  1084		return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
  1085	}
  1086	
  1087	func (p *parser) parseBlockStmt() *ast.BlockStmt {
  1088		if p.trace {
  1089			defer un(trace(p, "BlockStmt"))
  1090		}
  1091	
  1092		lbrace := p.expect(token.LBRACE)
  1093		p.openScope()
  1094		list := p.parseStmtList()
  1095		p.closeScope()
  1096		rbrace := p.expect(token.RBRACE)
  1097	
  1098		return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
  1099	}
  1100	
  1101	// ----------------------------------------------------------------------------
  1102	// Expressions
  1103	
  1104	func (p *parser) parseFuncTypeOrLit() ast.Expr {
  1105		if p.trace {
  1106			defer un(trace(p, "FuncTypeOrLit"))
  1107		}
  1108	
  1109		typ, scope := p.parseFuncType()
  1110		if p.tok != token.LBRACE {
  1111			// function type only
  1112			return typ
  1113		}
  1114	
  1115		p.exprLev++
  1116		body := p.parseBody(scope)
  1117		p.exprLev--
  1118	
  1119		return &ast.FuncLit{Type: typ, Body: body}
  1120	}
  1121	
  1122	// parseOperand may return an expression or a raw type (incl. array
  1123	// types of the form [...]T). Callers must verify the result.
  1124	// If lhs is set and the result is an identifier, it is not resolved.
  1125	//
  1126	func (p *parser) parseOperand(lhs bool) ast.Expr {
  1127		if p.trace {
  1128			defer un(trace(p, "Operand"))
  1129		}
  1130	
  1131		switch p.tok {
  1132		case token.IDENT:
  1133			x := p.parseIdent()
  1134			if !lhs {
  1135				p.resolve(x)
  1136			}
  1137			return x
  1138	
  1139		case token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING:
  1140			x := &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
  1141			p.next()
  1142			return x
  1143	
  1144		case token.LPAREN:
  1145			lparen := p.pos
  1146			p.next()
  1147			p.exprLev++
  1148			x := p.parseRhsOrType() // types may be parenthesized: (some type)
  1149			p.exprLev--
  1150			rparen := p.expect(token.RPAREN)
  1151			return &ast.ParenExpr{Lparen: lparen, X: x, Rparen: rparen}
  1152	
  1153		case token.FUNC:
  1154			return p.parseFuncTypeOrLit()
  1155		}
  1156	
  1157		if typ := p.tryIdentOrType(); typ != nil {
  1158			// could be type for composite literal or conversion
  1159			_, isIdent := typ.(*ast.Ident)
  1160			assert(!isIdent, "type cannot be identifier")
  1161			return typ
  1162		}
  1163	
  1164		// we have an error
  1165		pos := p.pos
  1166		p.errorExpected(pos, "operand")
  1167		syncStmt(p)
  1168		return &ast.BadExpr{From: pos, To: p.pos}
  1169	}
  1170	
  1171	func (p *parser) parseSelector(x ast.Expr) ast.Expr {
  1172		if p.trace {
  1173			defer un(trace(p, "Selector"))
  1174		}
  1175	
  1176		sel := p.parseIdent()
  1177	
  1178		return &ast.SelectorExpr{X: x, Sel: sel}
  1179	}
  1180	
  1181	func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr {
  1182		if p.trace {
  1183			defer un(trace(p, "TypeAssertion"))
  1184		}
  1185	
  1186		lparen := p.expect(token.LPAREN)
  1187		var typ ast.Expr
  1188		if p.tok == token.TYPE {
  1189			// type switch: typ == nil
  1190			p.next()
  1191		} else {
  1192			typ = p.parseType()
  1193		}
  1194		rparen := p.expect(token.RPAREN)
  1195	
  1196		return &ast.TypeAssertExpr{X: x, Type: typ, Lparen: lparen, Rparen: rparen}
  1197	}
  1198	
  1199	func (p *parser) parseIndexOrSlice(x ast.Expr) ast.Expr {
  1200		if p.trace {
  1201			defer un(trace(p, "IndexOrSlice"))
  1202		}
  1203	
  1204		const N = 3 // change the 3 to 2 to disable 3-index slices
  1205		lbrack := p.expect(token.LBRACK)
  1206		p.exprLev++
  1207		var index [N]ast.Expr
  1208		var colons [N - 1]token.Pos
  1209		if p.tok != token.COLON {
  1210			index[0] = p.parseRhs()
  1211		}
  1212		ncolons := 0
  1213		for p.tok == token.COLON && ncolons < len(colons) {
  1214			colons[ncolons] = p.pos
  1215			ncolons++
  1216			p.next()
  1217			if p.tok != token.COLON && p.tok != token.RBRACK && p.tok != token.EOF {
  1218				index[ncolons] = p.parseRhs()
  1219			}
  1220		}
  1221		p.exprLev--
  1222		rbrack := p.expect(token.RBRACK)
  1223	
  1224		if ncolons > 0 {
  1225			// slice expression
  1226			slice3 := false
  1227			if ncolons == 2 {
  1228				slice3 = true
  1229				// Check presence of 2nd and 3rd index here rather than during type-checking
  1230				// to prevent erroneous programs from passing through gofmt (was issue 7305).
  1231				if index[1] == nil {
  1232					p.error(colons[0], "2nd index required in 3-index slice")
  1233					index[1] = &ast.BadExpr{From: colons[0] + 1, To: colons[1]}
  1234				}
  1235				if index[2] == nil {
  1236					p.error(colons[1], "3rd index required in 3-index slice")
  1237					index[2] = &ast.BadExpr{From: colons[1] + 1, To: rbrack}
  1238				}
  1239			}
  1240			return &ast.SliceExpr{X: x, Lbrack: lbrack, Low: index[0], High: index[1], Max: index[2], Slice3: slice3, Rbrack: rbrack}
  1241		}
  1242	
  1243		return &ast.IndexExpr{X: x, Lbrack: lbrack, Index: index[0], Rbrack: rbrack}
  1244	}
  1245	
  1246	func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr {
  1247		if p.trace {
  1248			defer un(trace(p, "CallOrConversion"))
  1249		}
  1250	
  1251		lparen := p.expect(token.LPAREN)
  1252		p.exprLev++
  1253		var list []ast.Expr
  1254		var ellipsis token.Pos
  1255		for p.tok != token.RPAREN && p.tok != token.EOF && !ellipsis.IsValid() {
  1256			list = append(list, p.parseRhsOrType()) // builtins may expect a type: make(some type, ...)
  1257			if p.tok == token.ELLIPSIS {
  1258				ellipsis = p.pos
  1259				p.next()
  1260			}
  1261			if !p.atComma("argument list", token.RPAREN) {
  1262				break
  1263			}
  1264			p.next()
  1265		}
  1266		p.exprLev--
  1267		rparen := p.expectClosing(token.RPAREN, "argument list")
  1268	
  1269		return &ast.CallExpr{Fun: fun, Lparen: lparen, Args: list, Ellipsis: ellipsis, Rparen: rparen}
  1270	}
  1271	
  1272	func (p *parser) parseValue(keyOk bool) ast.Expr {
  1273		if p.trace {
  1274			defer un(trace(p, "Element"))
  1275		}
  1276	
  1277		if p.tok == token.LBRACE {
  1278			return p.parseLiteralValue(nil)
  1279		}
  1280	
  1281		// Because the parser doesn't know the composite literal type, it cannot
  1282		// know if a key that's an identifier is a struct field name or a name
  1283		// denoting a value. The former is not resolved by the parser or the
  1284		// resolver.
  1285		//
  1286		// Instead, _try_ to resolve such a key if possible. If it resolves,
  1287		// it has either a) resolved correctly, or b) resolved incorrectly because
  1288		// the key is a struct field with a name matching another identifier.
  1289		// In the former case we are done, and in the latter case we don't
  1290		// care because the type checker will do a separate field lookup.
  1291		//
  1292		// If the key does not resolve, it a) must be defined at the top
  1293		// level in another file of the same package, the universe scope, or be
  1294		// undeclared; or b) it is a struct field. In the former case, the type
  1295		// checker can do a top-level lookup, and in the latter case it will do
  1296		// a separate field lookup.
  1297		x := p.checkExpr(p.parseExpr(keyOk))
  1298		if keyOk {
  1299			if p.tok == token.COLON {
  1300				// Try to resolve the key but don't collect it
  1301				// as unresolved identifier if it fails so that
  1302				// we don't get (possibly false) errors about
  1303				// undeclared names.
  1304				p.tryResolve(x, false)
  1305			} else {
  1306				// not a key
  1307				p.resolve(x)
  1308			}
  1309		}
  1310	
  1311		return x
  1312	}
  1313	
  1314	func (p *parser) parseElement() ast.Expr {
  1315		if p.trace {
  1316			defer un(trace(p, "Element"))
  1317		}
  1318	
  1319		x := p.parseValue(true)
  1320		if p.tok == token.COLON {
  1321			colon := p.pos
  1322			p.next()
  1323			x = &ast.KeyValueExpr{Key: x, Colon: colon, Value: p.parseValue(false)}
  1324		}
  1325	
  1326		return x
  1327	}
  1328	
  1329	func (p *parser) parseElementList() (list []ast.Expr) {
  1330		if p.trace {
  1331			defer un(trace(p, "ElementList"))
  1332		}
  1333	
  1334		for p.tok != token.RBRACE && p.tok != token.EOF {
  1335			list = append(list, p.parseElement())
  1336			if !p.atComma("composite literal", token.RBRACE) {
  1337				break
  1338			}
  1339			p.next()
  1340		}
  1341	
  1342		return
  1343	}
  1344	
  1345	func (p *parser) parseLiteralValue(typ ast.Expr) ast.Expr {
  1346		if p.trace {
  1347			defer un(trace(p, "LiteralValue"))
  1348		}
  1349	
  1350		lbrace := p.expect(token.LBRACE)
  1351		var elts []ast.Expr
  1352		p.exprLev++
  1353		if p.tok != token.RBRACE {
  1354			elts = p.parseElementList()
  1355		}
  1356		p.exprLev--
  1357		rbrace := p.expectClosing(token.RBRACE, "composite literal")
  1358		return &ast.CompositeLit{Type: typ, Lbrace: lbrace, Elts: elts, Rbrace: rbrace}
  1359	}
  1360	
  1361	// checkExpr checks that x is an expression (and not a type).
  1362	func (p *parser) checkExpr(x ast.Expr) ast.Expr {
  1363		switch unparen(x).(type) {
  1364		case *ast.BadExpr:
  1365		case *ast.Ident:
  1366		case *ast.BasicLit:
  1367		case *ast.FuncLit:
  1368		case *ast.CompositeLit:
  1369		case *ast.ParenExpr:
  1370			panic("unreachable")
  1371		case *ast.SelectorExpr:
  1372		case *ast.IndexExpr:
  1373		case *ast.SliceExpr:
  1374		case *ast.TypeAssertExpr:
  1375			// If t.Type == nil we have a type assertion of the form
  1376			// y.(type), which is only allowed in type switch expressions.
  1377			// It's hard to exclude those but for the case where we are in
  1378			// a type switch. Instead be lenient and test this in the type
  1379			// checker.
  1380		case *ast.CallExpr:
  1381		case *ast.StarExpr:
  1382		case *ast.UnaryExpr:
  1383		case *ast.BinaryExpr:
  1384		default:
  1385			// all other nodes are not proper expressions
  1386			p.errorExpected(x.Pos(), "expression")
  1387			x = &ast.BadExpr{From: x.Pos(), To: p.safePos(x.End())}
  1388		}
  1389		return x
  1390	}
  1391	
  1392	// isTypeName reports whether x is a (qualified) TypeName.
  1393	func isTypeName(x ast.Expr) bool {
  1394		switch t := x.(type) {
  1395		case *ast.BadExpr:
  1396		case *ast.Ident:
  1397		case *ast.SelectorExpr:
  1398			_, isIdent := t.X.(*ast.Ident)
  1399			return isIdent
  1400		default:
  1401			return false // all other nodes are not type names
  1402		}
  1403		return true
  1404	}
  1405	
  1406	// isLiteralType reports whether x is a legal composite literal type.
  1407	func isLiteralType(x ast.Expr) bool {
  1408		switch t := x.(type) {
  1409		case *ast.BadExpr:
  1410		case *ast.Ident:
  1411		case *ast.SelectorExpr:
  1412			_, isIdent := t.X.(*ast.Ident)
  1413			return isIdent
  1414		case *ast.ArrayType:
  1415		case *ast.StructType:
  1416		case *ast.MapType:
  1417		default:
  1418			return false // all other nodes are not legal composite literal types
  1419		}
  1420		return true
  1421	}
  1422	
  1423	// If x is of the form *T, deref returns T, otherwise it returns x.
  1424	func deref(x ast.Expr) ast.Expr {
  1425		if p, isPtr := x.(*ast.StarExpr); isPtr {
  1426			x = p.X
  1427		}
  1428		return x
  1429	}
  1430	
  1431	// If x is of the form (T), unparen returns unparen(T), otherwise it returns x.
  1432	func unparen(x ast.Expr) ast.Expr {
  1433		if p, isParen := x.(*ast.ParenExpr); isParen {
  1434			x = unparen(p.X)
  1435		}
  1436		return x
  1437	}
  1438	
  1439	// checkExprOrType checks that x is an expression or a type
  1440	// (and not a raw type such as [...]T).
  1441	//
  1442	func (p *parser) checkExprOrType(x ast.Expr) ast.Expr {
  1443		switch t := unparen(x).(type) {
  1444		case *ast.ParenExpr:
  1445			panic("unreachable")
  1446		case *ast.UnaryExpr:
  1447		case *ast.ArrayType:
  1448			if len, isEllipsis := t.Len.(*ast.Ellipsis); isEllipsis {
  1449				p.error(len.Pos(), "expected array length, found '...'")
  1450				x = &ast.BadExpr{From: x.Pos(), To: p.safePos(x.End())}
  1451			}
  1452		}
  1453	
  1454		// all other nodes are expressions or types
  1455		return x
  1456	}
  1457	
  1458	// If lhs is set and the result is an identifier, it is not resolved.
  1459	func (p *parser) parsePrimaryExpr(lhs bool) ast.Expr {
  1460		if p.trace {
  1461			defer un(trace(p, "PrimaryExpr"))
  1462		}
  1463	
  1464		x := p.parseOperand(lhs)
  1465	L:
  1466		for {
  1467			switch p.tok {
  1468			case token.PERIOD:
  1469				p.next()
  1470				if lhs {
  1471					p.resolve(x)
  1472				}
  1473				switch p.tok {
  1474				case token.IDENT:
  1475					x = p.parseSelector(p.checkExprOrType(x))
  1476				case token.LPAREN:
  1477					x = p.parseTypeAssertion(p.checkExpr(x))
  1478				default:
  1479					pos := p.pos
  1480					p.errorExpected(pos, "selector or type assertion")
  1481					p.next() // make progress
  1482					sel := &ast.Ident{NamePos: pos, Name: "_"}
  1483					x = &ast.SelectorExpr{X: x, Sel: sel}
  1484				}
  1485			case token.LBRACK:
  1486				if lhs {
  1487					p.resolve(x)
  1488				}
  1489				x = p.parseIndexOrSlice(p.checkExpr(x))
  1490			case token.LPAREN:
  1491				if lhs {
  1492					p.resolve(x)
  1493				}
  1494				x = p.parseCallOrConversion(p.checkExprOrType(x))
  1495			case token.LBRACE:
  1496				if isLiteralType(x) && (p.exprLev >= 0 || !isTypeName(x)) {
  1497					if lhs {
  1498						p.resolve(x)
  1499					}
  1500					x = p.parseLiteralValue(x)
  1501				} else {
  1502					break L
  1503				}
  1504			default:
  1505				break L
  1506			}
  1507			lhs = false // no need to try to resolve again
  1508		}
  1509	
  1510		return x
  1511	}
  1512	
  1513	// If lhs is set and the result is an identifier, it is not resolved.
  1514	func (p *parser) parseUnaryExpr(lhs bool) ast.Expr {
  1515		if p.trace {
  1516			defer un(trace(p, "UnaryExpr"))
  1517		}
  1518	
  1519		switch p.tok {
  1520		case token.ADD, token.SUB, token.NOT, token.XOR, token.AND:
  1521			pos, op := p.pos, p.tok
  1522			p.next()
  1523			x := p.parseUnaryExpr(false)
  1524			return &ast.UnaryExpr{OpPos: pos, Op: op, X: p.checkExpr(x)}
  1525	
  1526		case token.ARROW:
  1527			// channel type or receive expression
  1528			arrow := p.pos
  1529			p.next()
  1530	
  1531			// If the next token is token.CHAN we still don't know if it
  1532			// is a channel type or a receive operation - we only know
  1533			// once we have found the end of the unary expression. There
  1534			// are two cases:
  1535			//
  1536			//   <- type  => (<-type) must be channel type
  1537			//   <- expr  => <-(expr) is a receive from an expression
  1538			//
  1539			// In the first case, the arrow must be re-associated with
  1540			// the channel type parsed already:
  1541			//
  1542			//   <- (chan type)    =>  (<-chan type)
  1543			//   <- (chan<- type)  =>  (<-chan (<-type))
  1544	
  1545			x := p.parseUnaryExpr(false)
  1546	
  1547			// determine which case we have
  1548			if typ, ok := x.(*ast.ChanType); ok {
  1549				// (<-type)
  1550	
  1551				// re-associate position info and <-
  1552				dir := ast.SEND
  1553				for ok && dir == ast.SEND {
  1554					if typ.Dir == ast.RECV {
  1555						// error: (<-type) is (<-(<-chan T))
  1556						p.errorExpected(typ.Arrow, "'chan'")
  1557					}
  1558					arrow, typ.Begin, typ.Arrow = typ.Arrow, arrow, arrow
  1559					dir, typ.Dir = typ.Dir, ast.RECV
  1560					typ, ok = typ.Value.(*ast.ChanType)
  1561				}
  1562				if dir == ast.SEND {
  1563					p.errorExpected(arrow, "channel type")
  1564				}
  1565	
  1566				return x
  1567			}
  1568	
  1569			// <-(expr)
  1570			return &ast.UnaryExpr{OpPos: arrow, Op: token.ARROW, X: p.checkExpr(x)}
  1571	
  1572		case token.MUL:
  1573			// pointer type or unary "*" expression
  1574			pos := p.pos
  1575			p.next()
  1576			x := p.parseUnaryExpr(false)
  1577			return &ast.StarExpr{Star: pos, X: p.checkExprOrType(x)}
  1578		}
  1579	
  1580		return p.parsePrimaryExpr(lhs)
  1581	}
  1582	
  1583	func (p *parser) tokPrec() (token.Token, int) {
  1584		tok := p.tok
  1585		if p.inRhs && tok == token.ASSIGN {
  1586			tok = token.EQL
  1587		}
  1588		return tok, tok.Precedence()
  1589	}
  1590	
  1591	// If lhs is set and the result is an identifier, it is not resolved.
  1592	func (p *parser) parseBinaryExpr(lhs bool, prec1 int) ast.Expr {
  1593		if p.trace {
  1594			defer un(trace(p, "BinaryExpr"))
  1595		}
  1596	
  1597		x := p.parseUnaryExpr(lhs)
  1598		for _, prec := p.tokPrec(); prec >= prec1; prec-- {
  1599			for {
  1600				op, oprec := p.tokPrec()
  1601				if oprec != prec {
  1602					break
  1603				}
  1604				pos := p.expect(op)
  1605				if lhs {
  1606					p.resolve(x)
  1607					lhs = false
  1608				}
  1609				y := p.parseBinaryExpr(false, prec+1)
  1610				x = &ast.BinaryExpr{X: p.checkExpr(x), OpPos: pos, Op: op, Y: p.checkExpr(y)}
  1611			}
  1612		}
  1613	
  1614		return x
  1615	}
  1616	
  1617	// If lhs is set and the result is an identifier, it is not resolved.
  1618	// The result may be a type or even a raw type ([...]int). Callers must
  1619	// check the result (using checkExpr or checkExprOrType), depending on
  1620	// context.
  1621	func (p *parser) parseExpr(lhs bool) ast.Expr {
  1622		if p.trace {
  1623			defer un(trace(p, "Expression"))
  1624		}
  1625	
  1626		return p.parseBinaryExpr(lhs, token.LowestPrec+1)
  1627	}
  1628	
  1629	func (p *parser) parseRhs() ast.Expr {
  1630		old := p.inRhs
  1631		p.inRhs = true
  1632		x := p.checkExpr(p.parseExpr(false))
  1633		p.inRhs = old
  1634		return x
  1635	}
  1636	
  1637	func (p *parser) parseRhsOrType() ast.Expr {
  1638		old := p.inRhs
  1639		p.inRhs = true
  1640		x := p.checkExprOrType(p.parseExpr(false))
  1641		p.inRhs = old
  1642		return x
  1643	}
  1644	
  1645	// ----------------------------------------------------------------------------
  1646	// Statements
  1647	
  1648	// Parsing modes for parseSimpleStmt.
  1649	const (
  1650		basic = iota
  1651		labelOk
  1652		rangeOk
  1653	)
  1654	
  1655	// parseSimpleStmt returns true as 2nd result if it parsed the assignment
  1656	// of a range clause (with mode == rangeOk). The returned statement is an
  1657	// assignment with a right-hand side that is a single unary expression of
  1658	// the form "range x". No guarantees are given for the left-hand side.
  1659	func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) {
  1660		if p.trace {
  1661			defer un(trace(p, "SimpleStmt"))
  1662		}
  1663	
  1664		x := p.parseLhsList()
  1665	
  1666		switch p.tok {
  1667		case
  1668			token.DEFINE, token.ASSIGN, token.ADD_ASSIGN,
  1669			token.SUB_ASSIGN, token.MUL_ASSIGN, token.QUO_ASSIGN,
  1670			token.REM_ASSIGN, token.AND_ASSIGN, token.OR_ASSIGN,
  1671			token.XOR_ASSIGN, token.SHL_ASSIGN, token.SHR_ASSIGN, token.AND_NOT_ASSIGN:
  1672			// assignment statement, possibly part of a range clause
  1673			pos, tok := p.pos, p.tok
  1674			p.next()
  1675			var y []ast.Expr
  1676			isRange := false
  1677			if mode == rangeOk && p.tok == token.RANGE && (tok == token.DEFINE || tok == token.ASSIGN) {
  1678				pos := p.pos
  1679				p.next()
  1680				y = []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
  1681				isRange = true
  1682			} else {
  1683				y = p.parseRhsList()
  1684			}
  1685			as := &ast.AssignStmt{Lhs: x, TokPos: pos, Tok: tok, Rhs: y}
  1686			if tok == token.DEFINE {
  1687				p.shortVarDecl(as, x)
  1688			}
  1689			return as, isRange
  1690		}
  1691	
  1692		if len(x) > 1 {
  1693			p.errorExpected(x[0].Pos(), "1 expression")
  1694			// continue with first expression
  1695		}
  1696	
  1697		switch p.tok {
  1698		case token.COLON:
  1699			// labeled statement
  1700			colon := p.pos
  1701			p.next()
  1702			if label, isIdent := x[0].(*ast.Ident); mode == labelOk && isIdent {
  1703				// Go spec: The scope of a label is the body of the function
  1704				// in which it is declared and excludes the body of any nested
  1705				// function.
  1706				stmt := &ast.LabeledStmt{Label: label, Colon: colon, Stmt: p.parseStmt()}
  1707				p.declare(stmt, nil, p.labelScope, ast.Lbl, label)
  1708				return stmt, false
  1709			}
  1710			// The label declaration typically starts at x[0].Pos(), but the label
  1711			// declaration may be erroneous due to a token after that position (and
  1712			// before the ':'). If SpuriousErrors is not set, the (only) error re-
  1713			// ported for the line is the illegal label error instead of the token
  1714			// before the ':' that caused the problem. Thus, use the (latest) colon
  1715			// position for error reporting.
  1716			p.error(colon, "illegal label declaration")
  1717			return &ast.BadStmt{From: x[0].Pos(), To: colon + 1}, false
  1718	
  1719		case token.ARROW:
  1720			// send statement
  1721			arrow := p.pos
  1722			p.next()
  1723			y := p.parseRhs()
  1724			return &ast.SendStmt{Chan: x[0], Arrow: arrow, Value: y}, false
  1725	
  1726		case token.INC, token.DEC:
  1727			// increment or decrement
  1728			s := &ast.IncDecStmt{X: x[0], TokPos: p.pos, Tok: p.tok}
  1729			p.next()
  1730			return s, false
  1731		}
  1732	
  1733		// expression
  1734		return &ast.ExprStmt{X: x[0]}, false
  1735	}
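// An illustrative fragment (not part of this file; assumes imports of
// go/ast, go/parser, and go/token, errors elided) showing the statement
// shapes parseSimpleStmt produces, as seen through the public API:
//
//	src := "package p\nfunc f() {\n\tx := 1\nL:\n\tx++\n}\n"
//	file, _ := parser.ParseFile(token.NewFileSet(), "p.go", src, 0)
//	body := file.Decls[0].(*ast.FuncDecl).Body.List
//	_ = body[0].(*ast.AssignStmt)  // x := 1, Tok == token.DEFINE (short variable declaration)
//	_ = body[1].(*ast.LabeledStmt) // L: x++, wraps an *ast.IncDecStmt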
  1736	
  1737	func (p *parser) parseCallExpr(callType string) *ast.CallExpr {
  1738		x := p.parseRhsOrType() // could be a conversion: (some type)(x)
  1739		if call, isCall := x.(*ast.CallExpr); isCall {
  1740			return call
  1741		}
  1742		if _, isBad := x.(*ast.BadExpr); !isBad {
  1743			// only report error if it's a new one
  1744			p.error(p.safePos(x.End()), fmt.Sprintf("function must be invoked in %s statement", callType))
  1745		}
  1746		return nil
  1747	}
  1748	
  1749	func (p *parser) parseGoStmt() ast.Stmt {
  1750		if p.trace {
  1751			defer un(trace(p, "GoStmt"))
  1752		}
  1753	
  1754		pos := p.expect(token.GO)
  1755		call := p.parseCallExpr("go")
  1756		p.expectSemi()
  1757		if call == nil {
  1758			return &ast.BadStmt{From: pos, To: pos + 2} // len("go")
  1759		}
  1760	
  1761		return &ast.GoStmt{Go: pos, Call: call}
  1762	}
  1763	
  1764	func (p *parser) parseDeferStmt() ast.Stmt {
  1765		if p.trace {
  1766			defer un(trace(p, "DeferStmt"))
  1767		}
  1768	
  1769		pos := p.expect(token.DEFER)
  1770		call := p.parseCallExpr("defer")
  1771		p.expectSemi()
  1772		if call == nil {
  1773			return &ast.BadStmt{From: pos, To: pos + 5} // len("defer")
  1774		}
  1775	
  1776		return &ast.DeferStmt{Defer: pos, Call: call}
  1777	}
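// Both parseGoStmt and parseDeferStmt report the parseCallExpr error above
// when their operand is not a call expression. An illustrative fragment
// (assumes imports of fmt, go/parser, and go/token):
//
//	src := "package p\nfunc f() {}\nfunc g() { defer f }\n"
//	_, err := parser.ParseFile(token.NewFileSet(), "p.go", src, 0)
//	fmt.Println(err) // reports: function must be invoked in defer statement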
  1778	
  1779	func (p *parser) parseReturnStmt() *ast.ReturnStmt {
  1780		if p.trace {
  1781			defer un(trace(p, "ReturnStmt"))
  1782		}
  1783	
  1784		pos := p.pos
  1785		p.expect(token.RETURN)
  1786		var x []ast.Expr
  1787		if p.tok != token.SEMICOLON && p.tok != token.RBRACE {
  1788			x = p.parseRhsList()
  1789		}
  1790		p.expectSemi()
  1791	
  1792		return &ast.ReturnStmt{Return: pos, Results: x}
  1793	}
  1794	
  1795	func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt {
  1796		if p.trace {
  1797			defer un(trace(p, "BranchStmt"))
  1798		}
  1799	
  1800		pos := p.expect(tok)
  1801		var label *ast.Ident
  1802		if tok != token.FALLTHROUGH && p.tok == token.IDENT {
  1803			label = p.parseIdent()
  1804			// add to list of unresolved targets
  1805			n := len(p.targetStack) - 1
  1806			p.targetStack[n] = append(p.targetStack[n], label)
  1807		}
  1808		p.expectSemi()
  1809	
  1810		return &ast.BranchStmt{TokPos: pos, Tok: tok, Label: label}
  1811	}
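// A small fragment (fmt, go/ast, go/parser, and go/token imports assumed;
// errors elided) showing a branch statement and its label as they appear in
// the AST:
//
//	src := "package p\nfunc f() {\nL:\n\tfor {\n\t\tbreak L\n\t}\n}\n"
//	file, _ := parser.ParseFile(token.NewFileSet(), "p.go", src, 0)
//	ast.Inspect(file, func(n ast.Node) bool {
//		if b, ok := n.(*ast.BranchStmt); ok {
//			fmt.Println(b.Tok, b.Label.Name) // break L
//		}
//		return true
//	})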
  1812	
  1813	func (p *parser) makeExpr(s ast.Stmt, kind string) ast.Expr {
  1814		if s == nil {
  1815			return nil
  1816		}
  1817		if es, isExpr := s.(*ast.ExprStmt); isExpr {
  1818			return p.checkExpr(es.X)
  1819		}
  1820		p.error(s.Pos(), fmt.Sprintf("expected %s, found simple statement (missing parentheses around composite literal?)", kind))
  1821		return &ast.BadExpr{From: s.Pos(), To: p.safePos(s.End())}
  1822	}
  1823	
  1824	func (p *parser) parseIfStmt() *ast.IfStmt {
  1825		if p.trace {
  1826			defer un(trace(p, "IfStmt"))
  1827		}
  1828	
  1829		pos := p.expect(token.IF)
  1830		p.openScope()
  1831		defer p.closeScope()
  1832	
  1833		var s ast.Stmt
  1834		var x ast.Expr
  1835		{
  1836			prevLev := p.exprLev
  1837			p.exprLev = -1
  1838			if p.tok == token.SEMICOLON {
  1839				p.next()
  1840				x = p.parseRhs()
  1841			} else {
  1842				s, _ = p.parseSimpleStmt(basic)
  1843				if p.tok == token.SEMICOLON {
  1844					p.next()
  1845					x = p.parseRhs()
  1846				} else {
  1847					x = p.makeExpr(s, "boolean expression")
  1848					s = nil
  1849				}
  1850			}
  1851			p.exprLev = prevLev
  1852		}
  1853	
  1854		body := p.parseBlockStmt()
  1855		var else_ ast.Stmt
  1856		if p.tok == token.ELSE {
  1857			p.next()
  1858			else_ = p.parseStmt()
  1859		} else {
  1860			p.expectSemi()
  1861		}
  1862	
  1863		return &ast.IfStmt{If: pos, Init: s, Cond: x, Body: body, Else: else_}
  1864	}
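// An illustrative fragment (go/ast, go/parser, and go/token imports assumed;
// errors elided) showing how an init statement and a condition are split in
// the resulting *ast.IfStmt:
//
//	src := "package p\nfunc f() int { return 0 }\nfunc g() {\n\tif v := f(); v > 0 {\n\t}\n}\n"
//	file, _ := parser.ParseFile(token.NewFileSet(), "p.go", src, 0)
//	ifs := file.Decls[1].(*ast.FuncDecl).Body.List[0].(*ast.IfStmt)
//	_ = ifs.Init.(*ast.AssignStmt) // v := f()
//	_ = ifs.Cond.(*ast.BinaryExpr) // v > 0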
  1865	
  1866	func (p *parser) parseTypeList() (list []ast.Expr) {
  1867		if p.trace {
  1868			defer un(trace(p, "TypeList"))
  1869		}
  1870	
  1871		list = append(list, p.parseType())
  1872		for p.tok == token.COMMA {
  1873			p.next()
  1874			list = append(list, p.parseType())
  1875		}
  1876	
  1877		return
  1878	}
  1879	
  1880	func (p *parser) parseCaseClause(typeSwitch bool) *ast.CaseClause {
  1881		if p.trace {
  1882			defer un(trace(p, "CaseClause"))
  1883		}
  1884	
  1885		pos := p.pos
  1886		var list []ast.Expr
  1887		if p.tok == token.CASE {
  1888			p.next()
  1889			if typeSwitch {
  1890				list = p.parseTypeList()
  1891			} else {
  1892				list = p.parseRhsList()
  1893			}
  1894		} else {
  1895			p.expect(token.DEFAULT)
  1896		}
  1897	
  1898		colon := p.expect(token.COLON)
  1899		p.openScope()
  1900		body := p.parseStmtList()
  1901		p.closeScope()
  1902	
  1903		return &ast.CaseClause{Case: pos, List: list, Colon: colon, Body: body}
  1904	}
  1905	
  1906	func isTypeSwitchAssert(x ast.Expr) bool {
  1907		a, ok := x.(*ast.TypeAssertExpr)
  1908		return ok && a.Type == nil
  1909	}
  1910	
  1911	func isTypeSwitchGuard(s ast.Stmt) bool {
  1912		switch t := s.(type) {
  1913		case *ast.ExprStmt:
  1914		// x.(type) (a TypeAssertExpr with a nil Type)
  1915			return isTypeSwitchAssert(t.X)
  1916		case *ast.AssignStmt:
  1917		// v := x.(type)
  1918			return len(t.Lhs) == 1 && t.Tok == token.DEFINE && len(t.Rhs) == 1 && isTypeSwitchAssert(t.Rhs[0])
  1919		}
  1920		return false
  1921	}
  1922	
  1923	func (p *parser) parseSwitchStmt() ast.Stmt {
  1924		if p.trace {
  1925			defer un(trace(p, "SwitchStmt"))
  1926		}
  1927	
  1928		pos := p.expect(token.SWITCH)
  1929		p.openScope()
  1930		defer p.closeScope()
  1931	
  1932		var s1, s2 ast.Stmt
  1933		if p.tok != token.LBRACE {
  1934			prevLev := p.exprLev
  1935			p.exprLev = -1
  1936			if p.tok != token.SEMICOLON {
  1937				s2, _ = p.parseSimpleStmt(basic)
  1938			}
  1939			if p.tok == token.SEMICOLON {
  1940				p.next()
  1941				s1 = s2
  1942				s2 = nil
  1943				if p.tok != token.LBRACE {
  1944					// A TypeSwitchGuard may declare a variable in addition
  1945					// to the variable declared in the initial SimpleStmt.
  1946					// Introduce extra scope to avoid redeclaration errors:
  1947					//
  1948					//	switch t := 0; t := x.(T) { ... }
  1949					//
  1950					// (this code is not valid Go because the first t
  1951					// cannot be accessed and thus is never used, the extra
  1952					// scope is needed for the correct error message).
  1953					//
  1954					// If we don't have a type switch, s2 must be an expression.
  1955					// Having the extra nested but empty scope won't affect it.
  1956					p.openScope()
  1957					defer p.closeScope()
  1958					s2, _ = p.parseSimpleStmt(basic)
  1959				}
  1960			}
  1961			p.exprLev = prevLev
  1962		}
  1963	
  1964		typeSwitch := isTypeSwitchGuard(s2)
  1965		lbrace := p.expect(token.LBRACE)
  1966		var list []ast.Stmt
  1967		for p.tok == token.CASE || p.tok == token.DEFAULT {
  1968			list = append(list, p.parseCaseClause(typeSwitch))
  1969		}
  1970		rbrace := p.expect(token.RBRACE)
  1971		p.expectSemi()
  1972		body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
  1973	
  1974		if typeSwitch {
  1975			return &ast.TypeSwitchStmt{Switch: pos, Init: s1, Assign: s2, Body: body}
  1976		}
  1977	
  1978		return &ast.SwitchStmt{Switch: pos, Init: s1, Tag: p.makeExpr(s2, "switch expression"), Body: body}
  1979	}
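// A fragment (fmt, go/ast, go/parser, and go/token imports assumed; errors
// elided) showing that a type switch guard is an assignment whose right-hand
// side is a TypeAssertExpr with a nil Type, and that the whole statement
// comes back as a TypeSwitchStmt rather than a SwitchStmt:
//
//	src := "package p\nfunc f(x interface{}) {\n\tswitch v := x.(type) {\n\tcase int:\n\t\t_ = v\n\t}\n}\n"
//	file, _ := parser.ParseFile(token.NewFileSet(), "p.go", src, 0)
//	ts := file.Decls[0].(*ast.FuncDecl).Body.List[0].(*ast.TypeSwitchStmt)
//	ta := ts.Assign.(*ast.AssignStmt).Rhs[0].(*ast.TypeAssertExpr)
//	fmt.Println(ta.Type == nil) // true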
  1980	
  1981	func (p *parser) parseCommClause() *ast.CommClause {
  1982		if p.trace {
  1983			defer un(trace(p, "CommClause"))
  1984		}
  1985	
  1986		p.openScope()
  1987		pos := p.pos
  1988		var comm ast.Stmt
  1989		if p.tok == token.CASE {
  1990			p.next()
  1991			lhs := p.parseLhsList()
  1992			if p.tok == token.ARROW {
  1993				// SendStmt
  1994				if len(lhs) > 1 {
  1995					p.errorExpected(lhs[0].Pos(), "1 expression")
  1996					// continue with first expression
  1997				}
  1998				arrow := p.pos
  1999				p.next()
  2000				rhs := p.parseRhs()
  2001				comm = &ast.SendStmt{Chan: lhs[0], Arrow: arrow, Value: rhs}
  2002			} else {
  2003				// RecvStmt
  2004				if tok := p.tok; tok == token.ASSIGN || tok == token.DEFINE {
  2005					// RecvStmt with assignment
  2006					if len(lhs) > 2 {
  2007						p.errorExpected(lhs[0].Pos(), "1 or 2 expressions")
  2008						// continue with first two expressions
  2009						lhs = lhs[0:2]
  2010					}
  2011					pos := p.pos
  2012					p.next()
  2013					rhs := p.parseRhs()
  2014					as := &ast.AssignStmt{Lhs: lhs, TokPos: pos, Tok: tok, Rhs: []ast.Expr{rhs}}
  2015					if tok == token.DEFINE {
  2016						p.shortVarDecl(as, lhs)
  2017					}
  2018					comm = as
  2019				} else {
  2020					// lhs must be single receive operation
  2021					if len(lhs) > 1 {
  2022						p.errorExpected(lhs[0].Pos(), "1 expression")
  2023						// continue with first expression
  2024					}
  2025					comm = &ast.ExprStmt{X: lhs[0]}
  2026				}
  2027			}
  2028		} else {
  2029			p.expect(token.DEFAULT)
  2030		}
  2031	
  2032		colon := p.expect(token.COLON)
  2033		body := p.parseStmtList()
  2034		p.closeScope()
  2035	
  2036		return &ast.CommClause{Case: pos, Comm: comm, Colon: colon, Body: body}
  2037	}
  2038	
  2039	func (p *parser) parseSelectStmt() *ast.SelectStmt {
  2040		if p.trace {
  2041			defer un(trace(p, "SelectStmt"))
  2042		}
  2043	
  2044		pos := p.expect(token.SELECT)
  2045		lbrace := p.expect(token.LBRACE)
  2046		var list []ast.Stmt
  2047		for p.tok == token.CASE || p.tok == token.DEFAULT {
  2048			list = append(list, p.parseCommClause())
  2049		}
  2050		rbrace := p.expect(token.RBRACE)
  2051		p.expectSemi()
  2052		body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
  2053	
  2054		return &ast.SelectStmt{Select: pos, Body: body}
  2055	}
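// A fragment (fmt, go/ast, go/parser, and go/token imports assumed; errors
// elided) showing the three Comm shapes a CommClause can carry:
//
//	src := "package p\nfunc f(c chan int) {\n\tselect {\n\tcase v := <-c:\n\t\t_ = v\n\tcase c <- 1:\n\tdefault:\n\t}\n}\n"
//	file, _ := parser.ParseFile(token.NewFileSet(), "p.go", src, 0)
//	sel := file.Decls[0].(*ast.FuncDecl).Body.List[0].(*ast.SelectStmt)
//	for _, cc := range sel.Body.List {
//		fmt.Printf("%T\n", cc.(*ast.CommClause).Comm) // *ast.AssignStmt, *ast.SendStmt, <nil>
//	}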
  2056	
  2057	func (p *parser) parseForStmt() ast.Stmt {
  2058		if p.trace {
  2059			defer un(trace(p, "ForStmt"))
  2060		}
  2061	
  2062		pos := p.expect(token.FOR)
  2063		p.openScope()
  2064		defer p.closeScope()
  2065	
  2066		var s1, s2, s3 ast.Stmt
  2067		var isRange bool
  2068		if p.tok != token.LBRACE {
  2069			prevLev := p.exprLev
  2070			p.exprLev = -1
  2071			if p.tok != token.SEMICOLON {
  2072				if p.tok == token.RANGE {
  2073					// "for range x" (nil lhs in assignment)
  2074					pos := p.pos
  2075					p.next()
  2076					y := []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
  2077					s2 = &ast.AssignStmt{Rhs: y}
  2078					isRange = true
  2079				} else {
  2080					s2, isRange = p.parseSimpleStmt(rangeOk)
  2081				}
  2082			}
  2083			if !isRange && p.tok == token.SEMICOLON {
  2084				p.next()
  2085				s1 = s2
  2086				s2 = nil
  2087				if p.tok != token.SEMICOLON {
  2088					s2, _ = p.parseSimpleStmt(basic)
  2089				}
  2090				p.expectSemi()
  2091				if p.tok != token.LBRACE {
  2092					s3, _ = p.parseSimpleStmt(basic)
  2093				}
  2094			}
  2095			p.exprLev = prevLev
  2096		}
  2097	
  2098		body := p.parseBlockStmt()
  2099		p.expectSemi()
  2100	
  2101		if isRange {
  2102			as := s2.(*ast.AssignStmt)
  2103			// check lhs
  2104			var key, value ast.Expr
  2105			switch len(as.Lhs) {
  2106			case 0:
  2107				// nothing to do
  2108			case 1:
  2109				key = as.Lhs[0]
  2110			case 2:
  2111				key, value = as.Lhs[0], as.Lhs[1]
  2112			default:
  2113				p.errorExpected(as.Lhs[len(as.Lhs)-1].Pos(), "at most 2 expressions")
  2114				return &ast.BadStmt{From: pos, To: p.safePos(body.End())}
  2115			}
  2116			// parseSimpleStmt returned a right-hand side that
  2117			// is a single unary expression of the form "range x"
  2118			x := as.Rhs[0].(*ast.UnaryExpr).X
  2119			return &ast.RangeStmt{
  2120				For:    pos,
  2121				Key:    key,
  2122				Value:  value,
  2123				TokPos: as.TokPos,
  2124				Tok:    as.Tok,
  2125				X:      x,
  2126				Body:   body,
  2127			}
  2128		}
  2129	
  2130		// regular for statement
  2131		return &ast.ForStmt{
  2132			For:  pos,
  2133			Init: s1,
  2134			Cond: p.makeExpr(s2, "boolean or range expression"),
  2135			Post: s3,
  2136			Body: body,
  2137		}
  2138	}
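// A fragment (go/ast, go/parser, and go/token imports assumed; errors
// elided) showing the two statement kinds parseForStmt can return:
//
//	src := "package p\nfunc f(xs []int) {\n\tfor i, v := range xs {\n\t\t_, _ = i, v\n\t}\n\tfor i := 0; i < 10; i++ {\n\t}\n}\n"
//	file, _ := parser.ParseFile(token.NewFileSet(), "p.go", src, 0)
//	body := file.Decls[0].(*ast.FuncDecl).Body.List
//	_ = body[0].(*ast.RangeStmt) // Key == i, Value == v, Tok == token.DEFINE
//	_ = body[1].(*ast.ForStmt)   // Init, Cond, and Post populated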
  2139	
  2140	func (p *parser) parseStmt() (s ast.Stmt) {
  2141		if p.trace {
  2142			defer un(trace(p, "Statement"))
  2143		}
  2144	
  2145		switch p.tok {
  2146		case token.CONST, token.TYPE, token.VAR:
  2147			s = &ast.DeclStmt{Decl: p.parseDecl(syncStmt)}
  2148		case
  2149			// tokens that may start an expression
  2150			token.IDENT, token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING, token.FUNC, token.LPAREN, // operands
  2151			token.LBRACK, token.STRUCT, token.MAP, token.CHAN, token.INTERFACE, // composite types
  2152			token.ADD, token.SUB, token.MUL, token.AND, token.XOR, token.ARROW, token.NOT: // unary operators
  2153			s, _ = p.parseSimpleStmt(labelOk)
  2154			// because of the required look-ahead, labeled statements are
  2155			// parsed by parseSimpleStmt - don't expect a semicolon after
  2156			// them
  2157			if _, isLabeledStmt := s.(*ast.LabeledStmt); !isLabeledStmt {
  2158				p.expectSemi()
  2159			}
  2160		case token.GO:
  2161			s = p.parseGoStmt()
  2162		case token.DEFER:
  2163			s = p.parseDeferStmt()
  2164		case token.RETURN:
  2165			s = p.parseReturnStmt()
  2166		case token.BREAK, token.CONTINUE, token.GOTO, token.FALLTHROUGH:
  2167			s = p.parseBranchStmt(p.tok)
  2168		case token.LBRACE:
  2169			s = p.parseBlockStmt()
  2170			p.expectSemi()
  2171		case token.IF:
  2172			s = p.parseIfStmt()
  2173		case token.SWITCH:
  2174			s = p.parseSwitchStmt()
  2175		case token.SELECT:
  2176			s = p.parseSelectStmt()
  2177		case token.FOR:
  2178			s = p.parseForStmt()
  2179		case token.SEMICOLON:
  2180			// Is it ever possible to have an implicit semicolon
  2181			// producing an empty statement in a valid program?
  2182			// (handle correctly anyway)
  2183			s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: p.lit == "\n"}
  2184			p.next()
  2185		case token.RBRACE:
  2186			// a semicolon may be omitted before a closing "}"
  2187			s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: true}
  2188		default:
  2189			// no statement found
  2190			pos := p.pos
  2191			p.errorExpected(pos, "statement")
  2192			syncStmt(p)
  2193			s = &ast.BadStmt{From: pos, To: p.pos}
  2194		}
  2195	
  2196		return
  2197	}
  2198	
  2199	// ----------------------------------------------------------------------------
  2200	// Declarations
  2201	
  2202	type parseSpecFunction func(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec
  2203	
  2204	func isValidImport(lit string) bool {
  2205		const illegalChars = `!"#$%&'()*,:;<=>?[\]^{|}` + "`\uFFFD"
  2206		s, _ := strconv.Unquote(lit) // go/scanner returns a legal string literal
  2207		for _, r := range s {
  2208			if !unicode.IsGraphic(r) || unicode.IsSpace(r) || strings.ContainsRune(illegalChars, r) {
  2209				return false
  2210			}
  2211		}
  2212		return s != ""
  2213	}
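// A fragment (fmt, go/parser, and go/token imports assumed) showing the
// "invalid import path" error for a path containing a space:
//
//	src := "package p\nimport \"a b\"\n"
//	_, err := parser.ParseFile(token.NewFileSet(), "p.go", src, 0)
//	fmt.Println(err) // reports: invalid import path: "a b"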
  2214	
  2215	func (p *parser) parseImportSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
  2216		if p.trace {
  2217			defer un(trace(p, "ImportSpec"))
  2218		}
  2219	
  2220		var ident *ast.Ident
  2221		switch p.tok {
  2222		case token.PERIOD:
  2223			ident = &ast.Ident{NamePos: p.pos, Name: "."}
  2224			p.next()
  2225		case token.IDENT:
  2226			ident = p.parseIdent()
  2227		}
  2228	
  2229		pos := p.pos
  2230		var path string
  2231		if p.tok == token.STRING {
  2232			path = p.lit
  2233			if !isValidImport(path) {
  2234				p.error(pos, "invalid import path: "+path)
  2235			}
  2236			p.next()
  2237		} else {
  2238			p.expect(token.STRING) // use expect() error handling
  2239		}
  2240		p.expectSemi() // call before accessing p.lineComment
  2241	
  2242		// collect imports
  2243		spec := &ast.ImportSpec{
  2244			Doc:     doc,
  2245			Name:    ident,
  2246			Path:    &ast.BasicLit{ValuePos: pos, Kind: token.STRING, Value: path},
  2247			Comment: p.lineComment,
  2248		}
  2249		p.imports = append(p.imports, spec)
  2250	
  2251		return spec
  2252	}
  2253	
  2254	func (p *parser) parseValueSpec(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec {
  2255		if p.trace {
  2256			defer un(trace(p, keyword.String()+"Spec"))
  2257		}
  2258	
  2259		pos := p.pos
  2260		idents := p.parseIdentList()
  2261		typ := p.tryType()
  2262		var values []ast.Expr
  2263		// always permit optional initialization for more tolerant parsing
  2264		if p.tok == token.ASSIGN {
  2265			p.next()
  2266			values = p.parseRhsList()
  2267		}
  2268		p.expectSemi() // call before accessing p.lineComment
  2269	
  2270		switch keyword {
  2271		case token.VAR:
  2272			if typ == nil && values == nil {
  2273				p.error(pos, "missing variable type or initialization")
  2274			}
  2275		case token.CONST:
  2276			if values == nil && (iota == 0 || typ != nil) {
  2277				p.error(pos, "missing constant value")
  2278			}
  2279		}
  2280	
  2281		// Go spec: The scope of a constant or variable identifier declared inside
  2282		// a function begins at the end of the ConstSpec or VarSpec and ends at
  2283		// the end of the innermost containing block.
  2284		// (Global identifiers are resolved in a separate phase after parsing.)
  2285		spec := &ast.ValueSpec{
  2286			Doc:     doc,
  2287			Names:   idents,
  2288			Type:    typ,
  2289			Values:  values,
  2290			Comment: p.lineComment,
  2291		}
  2292		kind := ast.Con
  2293		if keyword == token.VAR {
  2294			kind = ast.Var
  2295		}
  2296		p.declare(spec, iota, p.topScope, kind, idents...)
  2297	
  2298		return spec
  2299	}
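// A fragment (fmt, go/parser, and go/token imports assumed) showing the
// constant rule checked above: a later ConstSpec may omit its values
// (repeating the previous expression list), but the first spec and any spec
// that names a type must provide them:
//
//	src := "package p\nconst (\n\ta = iota\n\tb\n\tc int\n)\n"
//	_, err := parser.ParseFile(token.NewFileSet(), "p.go", src, 0)
//	fmt.Println(err) // reports "missing constant value" for c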
  2300	
  2301	func (p *parser) parseTypeSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
  2302		if p.trace {
  2303			defer un(trace(p, "TypeSpec"))
  2304		}
  2305	
  2306		ident := p.parseIdent()
  2307	
  2308		// Go spec: The scope of a type identifier declared inside a function begins
  2309		// at the identifier in the TypeSpec and ends at the end of the innermost
  2310		// containing block.
  2311		// (Global identifiers are resolved in a separate phase after parsing.)
  2312		spec := &ast.TypeSpec{Doc: doc, Name: ident}
  2313		p.declare(spec, nil, p.topScope, ast.Typ, ident)
  2314	
  2315		spec.Type = p.parseType()
  2316		p.expectSemi() // call before accessing p.lineComment
  2317		spec.Comment = p.lineComment
  2318	
  2319		return spec
  2320	}
  2321	
  2322	func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
  2323		if p.trace {
  2324			defer un(trace(p, "GenDecl("+keyword.String()+")"))
  2325		}
  2326	
  2327		doc := p.leadComment
  2328		pos := p.expect(keyword)
  2329		var lparen, rparen token.Pos
  2330		var list []ast.Spec
  2331		if p.tok == token.LPAREN {
  2332			lparen = p.pos
  2333			p.next()
  2334			for iota := 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++ {
  2335				list = append(list, f(p.leadComment, keyword, iota))
  2336			}
  2337			rparen = p.expect(token.RPAREN)
  2338			p.expectSemi()
  2339		} else {
  2340			list = append(list, f(nil, keyword, 0))
  2341		}
  2342	
  2343		return &ast.GenDecl{
  2344			Doc:    doc,
  2345			TokPos: pos,
  2346			Tok:    keyword,
  2347			Lparen: lparen,
  2348			Specs:  list,
  2349			Rparen: rparen,
  2350		}
  2351	}
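// A fragment (go/ast, go/parser, and go/token imports assumed; errors
// elided) contrasting a parenthesized declaration group with a single spec:
//
//	src := "package p\nvar (\n\tx int\n\ty = 1\n)\nvar z bool\n"
//	file, _ := parser.ParseFile(token.NewFileSet(), "p.go", src, 0)
//	g1 := file.Decls[0].(*ast.GenDecl) // g1.Lparen.IsValid() == true, len(g1.Specs) == 2
//	g2 := file.Decls[1].(*ast.GenDecl) // g2.Lparen.IsValid() == false, len(g2.Specs) == 1
//	_, _ = g1, g2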
  2352	
  2353	func (p *parser) parseFuncDecl() *ast.FuncDecl {
  2354		if p.trace {
  2355			defer un(trace(p, "FunctionDecl"))
  2356		}
  2357	
  2358		doc := p.leadComment
  2359		pos := p.expect(token.FUNC)
  2360		scope := ast.NewScope(p.topScope) // function scope
  2361	
  2362		var recv *ast.FieldList
  2363		if p.tok == token.LPAREN {
  2364			recv = p.parseParameters(scope, false)
  2365		}
  2366	
  2367		ident := p.parseIdent()
  2368	
  2369		params, results := p.parseSignature(scope)
  2370	
  2371		var body *ast.BlockStmt
  2372		if p.tok == token.LBRACE {
  2373			body = p.parseBody(scope)
  2374		}
  2375		p.expectSemi()
  2376	
  2377		decl := &ast.FuncDecl{
  2378			Doc:  doc,
  2379			Recv: recv,
  2380			Name: ident,
  2381			Type: &ast.FuncType{
  2382				Func:    pos,
  2383				Params:  params,
  2384				Results: results,
  2385			},
  2386			Body: body,
  2387		}
  2388		if recv == nil {
  2389			// Go spec: The scope of an identifier denoting a constant, type,
  2390			// variable, or function (but not method) declared at top level
  2391			// (outside any function) is the package block.
  2392			//
  2393			// init() functions cannot be referred to and there may
  2394			// be more than one - don't put them in the pkgScope
  2395			if ident.Name != "init" {
  2396				p.declare(decl, nil, p.pkgScope, ast.Fun, ident)
  2397			}
  2398		}
  2399	
  2400		return decl
  2401	}
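// A fragment (fmt, go/ast, go/parser, and go/token imports assumed; errors
// elided) showing the receiver distinction and the special treatment of
// init described above:
//
//	src := "package p\ntype T struct{}\nfunc (t T) M() {}\nfunc F() {}\nfunc init() {}\n"
//	file, _ := parser.ParseFile(token.NewFileSet(), "p.go", src, 0)
//	fmt.Println(file.Decls[1].(*ast.FuncDecl).Recv != nil) // true: M is a method
//	fmt.Println(file.Scope.Lookup("F") != nil)             // true: F is in the package scope
//	fmt.Println(file.Scope.Lookup("init") == nil)          // true: init is not declared there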
  2402	
  2403	func (p *parser) parseDecl(sync func(*parser)) ast.Decl {
  2404		if p.trace {
  2405			defer un(trace(p, "Declaration"))
  2406		}
  2407	
  2408		var f parseSpecFunction
  2409		switch p.tok {
  2410		case token.CONST, token.VAR:
  2411			f = p.parseValueSpec
  2412	
  2413		case token.TYPE:
  2414			f = p.parseTypeSpec
  2415	
  2416		case token.FUNC:
  2417			return p.parseFuncDecl()
  2418	
  2419		default:
  2420			pos := p.pos
  2421			p.errorExpected(pos, "declaration")
  2422			sync(p)
  2423			return &ast.BadDecl{From: pos, To: p.pos}
  2424		}
  2425	
  2426		return p.parseGenDecl(p.tok, f)
  2427	}
  2428	
  2429	// ----------------------------------------------------------------------------
  2430	// Source files
  2431	
  2432	func (p *parser) parseFile() *ast.File {
  2433		if p.trace {
  2434			defer un(trace(p, "File"))
  2435		}
  2436	
  2437		// Don't bother parsing the rest if we had errors scanning the first token.
  2438		// Likely not a Go source file at all.
  2439		if p.errors.Len() != 0 {
  2440			return nil
  2441		}
  2442	
  2443		// package clause
  2444		doc := p.leadComment
  2445		pos := p.expect(token.PACKAGE)
  2446		// Go spec: The package clause is not a declaration;
  2447		// the package name does not appear in any scope.
  2448		ident := p.parseIdent()
  2449		if ident.Name == "_" && p.mode&DeclarationErrors != 0 {
  2450			p.error(p.pos, "invalid package name _")
  2451		}
  2452		p.expectSemi()
  2453	
  2454		// Don't bother parsing the rest if we had errors parsing the package clause.
  2455		// Likely not a Go source file at all.
  2456		if p.errors.Len() != 0 {
  2457			return nil
  2458		}
  2459	
  2460		p.openScope()
  2461		p.pkgScope = p.topScope
  2462		var decls []ast.Decl
  2463		if p.mode&PackageClauseOnly == 0 {
  2464			// import decls
  2465			for p.tok == token.IMPORT {
  2466				decls = append(decls, p.parseGenDecl(token.IMPORT, p.parseImportSpec))
  2467			}
  2468	
  2469			if p.mode&ImportsOnly == 0 {
  2470				// rest of package body
  2471				for p.tok != token.EOF {
  2472					decls = append(decls, p.parseDecl(syncDecl))
  2473				}
  2474			}
  2475		}
  2476		p.closeScope()
  2477		assert(p.topScope == nil, "unbalanced scopes")
  2478		assert(p.labelScope == nil, "unbalanced label scopes")
  2479	
  2480		// resolve global identifiers within the same file
  2481		i := 0
  2482		for _, ident := range p.unresolved {
  2483			// i <= index for current ident
  2484			assert(ident.Obj == unresolved, "object already resolved")
  2485			ident.Obj = p.pkgScope.Lookup(ident.Name) // also removes unresolved sentinel
  2486			if ident.Obj == nil {
  2487				p.unresolved[i] = ident
  2488				i++
  2489			}
  2490		}
  2491	
  2492		return &ast.File{
  2493			Doc:        doc,
  2494			Package:    pos,
  2495			Name:       ident,
  2496			Decls:      decls,
  2497			Scope:      p.pkgScope,
  2498			Imports:    p.imports,
  2499			Unresolved: p.unresolved[0:i],
  2500			Comments:   p.comments,
  2501		}
  2502	}
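// A fragment (fmt, go/parser, and go/token imports assumed; errors elided)
// showing the per-file resolution result: identifiers not found in the
// package scope (such as imported package names and universe-scope names)
// are left in File.Unresolved, and collected imports appear in File.Imports:
//
//	src := "package p\nimport \"fmt\"\nvar x = fmt.Sprint(len(\"\"))\n"
//	file, _ := parser.ParseFile(token.NewFileSet(), "p.go", src, 0)
//	for _, u := range file.Unresolved {
//		fmt.Println(u.Name) // fmt, len
//	}
//	for _, s := range file.Imports {
//		fmt.Println(s.Path.Value) // "fmt"
//	}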
  2503	
