...

Source file src/pkg/go/parser/parser.go

     1	// Copyright 2009 The Go Authors. All rights reserved.
     2	// Use of this source code is governed by a BSD-style
     3	// license that can be found in the LICENSE file.
     4	
     5	// Package parser implements a parser for Go source files. Input may be
     6	// provided in a variety of forms (see the various Parse* functions); the
     7	// output is an abstract syntax tree (AST) representing the Go source. The
     8	// parser is invoked through one of the Parse* functions.
     9	//
    10	package parser
    11	
    12	import (
    13		"fmt"
    14		"go/ast"
    15		"go/scanner"
    16		"go/token"
    17		"strconv"
    18		"strings"
    19		"unicode"
    20	)
    21	
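// Example (editor's sketch, not part of the original source): the package
// comment above notes that the parser is invoked through one of the Parse*
// functions. ParseFile and the Mode flags are defined elsewhere in this
// package; a minimal, self-contained use might look like this (the file
// name and source text are illustrative):
//
//	package main
//
//	import (
//		"go/ast"
//		"go/parser"
//		"go/token"
//		"log"
//	)
//
//	func main() {
//		src := "package main\nfunc main() { println(\"hi\") }\n"
//		fset := token.NewFileSet()
//		// Parse src; ParseComments keeps comments in the AST.
//		f, err := parser.ParseFile(fset, "example.go", src, parser.ParseComments)
//		if err != nil {
//			log.Fatal(err)
//		}
//		ast.Print(fset, f) // dump the AST for inspection
//	}
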
    22	// The parser structure holds the parser's internal state.
    23	type parser struct {
    24		file    *token.File
    25		errors  scanner.ErrorList
    26		scanner scanner.Scanner
    27	
    28		// Tracing/debugging
    29		mode   Mode // parsing mode
    30		trace  bool // == (mode & Trace != 0)
    31		indent int  // indentation used for tracing output
    32	
    33		// Comments
    34		comments    []*ast.CommentGroup
    35		leadComment *ast.CommentGroup // last lead comment
    36		lineComment *ast.CommentGroup // last line comment
    37	
    38		// Next token
    39		pos token.Pos   // token position
    40		tok token.Token // one token look-ahead
    41		lit string      // token literal
    42	
    43		// Error recovery
    44		// (used to limit the number of calls to syncXXX functions
    45		// w/o making scanning progress - avoids potential endless
    46		// loops across multiple parser functions during error recovery)
    47		syncPos token.Pos // last synchronization position
    48		syncCnt int       // number of calls to syncXXX without progress
    49	
    50		// Non-syntactic parser control
    51		exprLev int  // < 0: in control clause, >= 0: in expression
    52		inRhs   bool // if set, the parser is parsing a rhs expression
    53	
    54		// Ordinary identifier scopes
    55		pkgScope   *ast.Scope        // pkgScope.Outer == nil
    56		topScope   *ast.Scope        // top-most scope; may be pkgScope
    57		unresolved []*ast.Ident      // unresolved identifiers
    58		imports    []*ast.ImportSpec // list of imports
    59	
    60		// Label scopes
    61		// (maintained by open/close LabelScope)
    62		labelScope  *ast.Scope     // label scope for current function
    63		targetStack [][]*ast.Ident // stack of unresolved labels
    64	}
    65	
    66	func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode Mode) {
    67		p.file = fset.AddFile(filename, -1, len(src))
    68		var m scanner.Mode
    69		if mode&ParseComments != 0 {
    70			m = scanner.ScanComments
    71		}
    72		eh := func(pos token.Position, msg string) { p.errors.Add(pos, msg) }
    73		p.scanner.Init(p.file, src, eh, m)
    74	
    75		p.mode = mode
    76		p.trace = mode&Trace != 0 // for convenience (p.trace is used frequently)
    77	
    78		p.next()
    79	}
    80	
    81	// ----------------------------------------------------------------------------
    82	// Scoping support
    83	
    84	func (p *parser) openScope() {
    85		p.topScope = ast.NewScope(p.topScope)
    86	}
    87	
    88	func (p *parser) closeScope() {
    89		p.topScope = p.topScope.Outer
    90	}
    91	
    92	func (p *parser) openLabelScope() {
    93		p.labelScope = ast.NewScope(p.labelScope)
    94		p.targetStack = append(p.targetStack, nil)
    95	}
    96	
    97	func (p *parser) closeLabelScope() {
    98		// resolve labels
    99		n := len(p.targetStack) - 1
   100		scope := p.labelScope
   101		for _, ident := range p.targetStack[n] {
   102			ident.Obj = scope.Lookup(ident.Name)
   103			if ident.Obj == nil && p.mode&DeclarationErrors != 0 {
   104				p.error(ident.Pos(), fmt.Sprintf("label %s undefined", ident.Name))
   105			}
   106		}
   107		// pop label scope
   108		p.targetStack = p.targetStack[0:n]
   109		p.labelScope = p.labelScope.Outer
   110	}
   111	
   112	func (p *parser) declare(decl, data interface{}, scope *ast.Scope, kind ast.ObjKind, idents ...*ast.Ident) {
   113		for _, ident := range idents {
   114			assert(ident.Obj == nil, "identifier already declared or resolved")
   115			obj := ast.NewObj(kind, ident.Name)
   116			// remember the corresponding declaration for redeclaration
   117			// errors and global variable resolution/typechecking phase
   118			obj.Decl = decl
   119			obj.Data = data
   120			ident.Obj = obj
   121			if ident.Name != "_" {
   122				if alt := scope.Insert(obj); alt != nil && p.mode&DeclarationErrors != 0 {
   123					prevDecl := ""
   124					if pos := alt.Pos(); pos.IsValid() {
   125						prevDecl = fmt.Sprintf("\n\tprevious declaration at %s", p.file.Position(pos))
   126					}
   127					p.error(ident.Pos(), fmt.Sprintf("%s redeclared in this block%s", ident.Name, prevDecl))
   128				}
   129			}
   130		}
   131	}
   132	
   133	func (p *parser) shortVarDecl(decl *ast.AssignStmt, list []ast.Expr) {
   134		// Go spec: A short variable declaration may redeclare variables
   135		// provided they were originally declared in the same block with
   136		// the same type, and at least one of the non-blank variables is new.
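	// For example (editor's note): in
	//	a, err := os.Open(name1)
	//	b, err := os.Open(name2)
	// the second := is legal because b is new; err is redeclared and
	// simply refers to the variable introduced by the first statement.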
   137		n := 0 // number of new variables
   138		for _, x := range list {
   139			if ident, isIdent := x.(*ast.Ident); isIdent {
   140				assert(ident.Obj == nil, "identifier already declared or resolved")
   141				obj := ast.NewObj(ast.Var, ident.Name)
   142				// remember corresponding assignment for other tools
   143				obj.Decl = decl
   144				ident.Obj = obj
   145				if ident.Name != "_" {
   146					if alt := p.topScope.Insert(obj); alt != nil {
   147						ident.Obj = alt // redeclaration
   148					} else {
   149						n++ // new declaration
   150					}
   151				}
   152			} else {
   153				p.errorExpected(x.Pos(), "identifier on left side of :=")
   154			}
   155		}
   156		if n == 0 && p.mode&DeclarationErrors != 0 {
   157			p.error(list[0].Pos(), "no new variables on left side of :=")
   158		}
   159	}
   160	
   161	// The unresolved object is a sentinel to mark identifiers that have been added
   162	// to the list of unresolved identifiers. The sentinel is only used for verifying
   163	// internal consistency.
   164	var unresolved = new(ast.Object)
   165	
   166	// If x is an identifier, tryResolve attempts to resolve x by looking up
   167	// the object it denotes. If no object is found and collectUnresolved is
   168	// set, x is marked as unresolved and collected in the list of unresolved
   169	// identifiers.
   170	//
   171	func (p *parser) tryResolve(x ast.Expr, collectUnresolved bool) {
   172		// nothing to do if x is not an identifier or the blank identifier
   173		ident, _ := x.(*ast.Ident)
   174		if ident == nil {
   175			return
   176		}
   177		assert(ident.Obj == nil, "identifier already declared or resolved")
   178		if ident.Name == "_" {
   179			return
   180		}
   181		// try to resolve the identifier
   182		for s := p.topScope; s != nil; s = s.Outer {
   183			if obj := s.Lookup(ident.Name); obj != nil {
   184				ident.Obj = obj
   185				return
   186			}
   187		}
   188		// all local scopes are known, so any unresolved identifier
   189		// must be found either in the file scope, package scope
   190		// (perhaps in another file), or universe scope --- collect
   191		// them so that they can be resolved later
   192		if collectUnresolved {
   193			ident.Obj = unresolved
   194			p.unresolved = append(p.unresolved, ident)
   195		}
   196	}
   197	
   198	func (p *parser) resolve(x ast.Expr) {
   199		p.tryResolve(x, true)
   200	}
   201	
   202	// ----------------------------------------------------------------------------
   203	// Parsing support
   204	
   205	func (p *parser) printTrace(a ...interface{}) {
   206		const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
   207		const n = len(dots)
   208		pos := p.file.Position(p.pos)
   209		fmt.Printf("%5d:%3d: ", pos.Line, pos.Column)
   210		i := 2 * p.indent
   211		for i > n {
   212			fmt.Print(dots)
   213			i -= n
   214		}
   215		// i <= n
   216		fmt.Print(dots[0:i])
   217		fmt.Println(a...)
   218	}
   219	
   220	func trace(p *parser, msg string) *parser {
   221		p.printTrace(msg, "(")
   222		p.indent++
   223		return p
   224	}
   225	
   226	// Usage pattern: defer un(trace(p, "..."))
   227	func un(p *parser) {
   228		p.indent--
   229		p.printTrace(")")
   230	}
   231	
   232	// Advance to the next token.
   233	func (p *parser) next0() {
   234		// Because of one-token look-ahead, print the previous token
   235		// when tracing as it provides a more readable output. The
   236		// very first token (!p.pos.IsValid()) is not initialized
    237	// (it is token.ILLEGAL), so don't print it.
   238		if p.trace && p.pos.IsValid() {
   239			s := p.tok.String()
   240			switch {
   241			case p.tok.IsLiteral():
   242				p.printTrace(s, p.lit)
   243			case p.tok.IsOperator(), p.tok.IsKeyword():
   244				p.printTrace("\"" + s + "\"")
   245			default:
   246				p.printTrace(s)
   247			}
   248		}
   249	
   250		p.pos, p.tok, p.lit = p.scanner.Scan()
   251	}
   252	
   253	// Consume a comment and return it and the line on which it ends.
   254	func (p *parser) consumeComment() (comment *ast.Comment, endline int) {
   255		// /*-style comments may end on a different line than where they start.
   256		// Scan the comment for '\n' chars and adjust endline accordingly.
   257		endline = p.file.Line(p.pos)
   258		if p.lit[1] == '*' {
   259			// don't use range here - no need to decode Unicode code points
   260			for i := 0; i < len(p.lit); i++ {
   261				if p.lit[i] == '\n' {
   262					endline++
   263				}
   264			}
   265		}
   266	
   267		comment = &ast.Comment{Slash: p.pos, Text: p.lit}
   268		p.next0()
   269	
   270		return
   271	}
   272	
   273	// Consume a group of adjacent comments, add it to the parser's
   274	// comments list, and return it together with the line at which
   275	// the last comment in the group ends. A non-comment token or n
   276	// empty lines terminate a comment group.
   277	//
   278	func (p *parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline int) {
   279		var list []*ast.Comment
   280		endline = p.file.Line(p.pos)
   281		for p.tok == token.COMMENT && p.file.Line(p.pos) <= endline+n {
   282			var comment *ast.Comment
   283			comment, endline = p.consumeComment()
   284			list = append(list, comment)
   285		}
   286	
   287		// add comment group to the comments list
   288		comments = &ast.CommentGroup{List: list}
   289		p.comments = append(p.comments, comments)
   290	
   291		return
   292	}
   293	
   294	// Advance to the next non-comment token. In the process, collect
    295	// any comment groups encountered, and remember the last lead
   296	// and line comments.
   297	//
   298	// A lead comment is a comment group that starts and ends in a
   299	// line without any other tokens and that is followed by a non-comment
   300	// token on the line immediately after the comment group.
   301	//
   302	// A line comment is a comment group that follows a non-comment
   303	// token on the same line, and that has no tokens after it on the line
   304	// where it ends.
   305	//
   306	// Lead and line comments may be considered documentation that is
   307	// stored in the AST.
   308	//
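// For example (editor's addition), given
//
//	// f does nothing.
//	func f() {}
//
//	x = 1 // assign one
//
// "// f does nothing." is recorded as the lead comment of the func
// declaration on the next line, while "// assign one" is recorded as a
// line comment because it follows a token on the same line.
//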
   309	func (p *parser) next() {
   310		p.leadComment = nil
   311		p.lineComment = nil
   312		prev := p.pos
   313		p.next0()
   314	
   315		if p.tok == token.COMMENT {
   316			var comment *ast.CommentGroup
   317			var endline int
   318	
   319			if p.file.Line(p.pos) == p.file.Line(prev) {
   320				// The comment is on same line as the previous token; it
   321				// cannot be a lead comment but may be a line comment.
   322				comment, endline = p.consumeCommentGroup(0)
   323				if p.file.Line(p.pos) != endline {
   324					// The next token is on a different line, thus
   325					// the last comment group is a line comment.
   326					p.lineComment = comment
   327				}
   328			}
   329	
   330			// consume successor comments, if any
   331			endline = -1
   332			for p.tok == token.COMMENT {
   333				comment, endline = p.consumeCommentGroup(1)
   334			}
   335	
   336			if endline+1 == p.file.Line(p.pos) {
   337				// The next token is following on the line immediately after the
   338				// comment group, thus the last comment group is a lead comment.
   339				p.leadComment = comment
   340			}
   341		}
   342	}
   343	
   344	// A bailout panic is raised to indicate early termination.
   345	type bailout struct{}
   346	
   347	func (p *parser) error(pos token.Pos, msg string) {
   348		epos := p.file.Position(pos)
   349	
   350		// If AllErrors is not set, discard errors reported on the same line
   351		// as the last recorded error and stop parsing if there are more than
   352		// 10 errors.
   353		if p.mode&AllErrors == 0 {
   354			n := len(p.errors)
   355			if n > 0 && p.errors[n-1].Pos.Line == epos.Line {
   356				return // discard - likely a spurious error
   357			}
   358			if n > 10 {
   359				panic(bailout{})
   360			}
   361		}
   362	
   363		p.errors.Add(epos, msg)
   364	}
   365	
   366	func (p *parser) errorExpected(pos token.Pos, msg string) {
   367		msg = "expected " + msg
   368		if pos == p.pos {
   369			// the error happened at the current position;
   370			// make the error message more specific
   371			if p.tok == token.SEMICOLON && p.lit == "\n" {
   372				msg += ", found newline"
   373			} else {
   374				msg += ", found '" + p.tok.String() + "'"
   375				if p.tok.IsLiteral() {
   376					msg += " " + p.lit
   377				}
   378			}
   379		}
   380		p.error(pos, msg)
   381	}
   382	
   383	func (p *parser) expect(tok token.Token) token.Pos {
   384		pos := p.pos
   385		if p.tok != tok {
   386			p.errorExpected(pos, "'"+tok.String()+"'")
   387		}
   388		p.next() // make progress
   389		return pos
   390	}
   391	
   392	// expectClosing is like expect but provides a better error message
   393	// for the common case of a missing comma before a newline.
   394	//
   395	func (p *parser) expectClosing(tok token.Token, context string) token.Pos {
   396		if p.tok != tok && p.tok == token.SEMICOLON && p.lit == "\n" {
   397			p.error(p.pos, "missing ',' before newline in "+context)
   398			p.next()
   399		}
   400		return p.expect(tok)
   401	}
   402	
   403	func (p *parser) expectSemi() {
   404		// semicolon is optional before a closing ')' or '}'
   405		if p.tok != token.RPAREN && p.tok != token.RBRACE {
   406			if p.tok == token.SEMICOLON {
   407				p.next()
   408			} else {
   409				p.errorExpected(p.pos, "';'")
   410				syncStmt(p)
   411			}
   412		}
   413	}
   414	
   415	func (p *parser) atComma(context string) bool {
   416		if p.tok == token.COMMA {
   417			return true
   418		}
   419		if p.tok == token.SEMICOLON && p.lit == "\n" {
   420			p.error(p.pos, "missing ',' before newline in "+context)
   421			return true // "insert" the comma and continue
   422	
   423		}
   424		return false
   425	}
   426	
   427	func assert(cond bool, msg string) {
   428		if !cond {
   429			panic("go/parser internal error: " + msg)
   430		}
   431	}
   432	
   433	// syncStmt advances to the next statement.
   434	// Used for synchronization after an error.
   435	//
   436	func syncStmt(p *parser) {
   437		for {
   438			switch p.tok {
   439			case token.BREAK, token.CONST, token.CONTINUE, token.DEFER,
   440				token.FALLTHROUGH, token.FOR, token.GO, token.GOTO,
   441				token.IF, token.RETURN, token.SELECT, token.SWITCH,
   442				token.TYPE, token.VAR:
   443				// Return only if parser made some progress since last
   444				// sync or if it has not reached 10 sync calls without
   445				// progress. Otherwise consume at least one token to
   446				// avoid an endless parser loop (it is possible that
   447				// both parseOperand and parseStmt call syncStmt and
   448				// correctly do not advance, thus the need for the
   449				// invocation limit p.syncCnt).
   450				if p.pos == p.syncPos && p.syncCnt < 10 {
   451					p.syncCnt++
   452					return
   453				}
   454				if p.pos > p.syncPos {
   455					p.syncPos = p.pos
   456					p.syncCnt = 0
   457					return
   458				}
   459				// Reaching here indicates a parser bug, likely an
   460				// incorrect token list in this function, but it only
   461				// leads to skipping of possibly correct code if a
   462				// previous error is present, and thus is preferred
   463				// over a non-terminating parse.
   464			case token.EOF:
   465				return
   466			}
   467			p.next()
   468		}
   469	}
   470	
   471	// syncDecl advances to the next declaration.
   472	// Used for synchronization after an error.
   473	//
   474	func syncDecl(p *parser) {
   475		for {
   476			switch p.tok {
   477			case token.CONST, token.TYPE, token.VAR:
   478				// see comments in syncStmt
   479				if p.pos == p.syncPos && p.syncCnt < 10 {
   480					p.syncCnt++
   481					return
   482				}
   483				if p.pos > p.syncPos {
   484					p.syncPos = p.pos
   485					p.syncCnt = 0
   486					return
   487				}
   488			case token.EOF:
   489				return
   490			}
   491			p.next()
   492		}
   493	}
   494	
   495	// safePos returns a valid file position for a given position: If pos
   496	// is valid to begin with, safePos returns pos. If pos is out-of-range,
   497	// safePos returns the EOF position.
   498	//
    499	// This is a hack to work around "artificial" end positions in the AST which
   500	// are computed by adding 1 to (presumably valid) token positions. If the
   501	// token positions are invalid due to parse errors, the resulting end position
   502	// may be past the file's EOF position, which would lead to panics if used
   503	// later on.
   504	//
   505	func (p *parser) safePos(pos token.Pos) (res token.Pos) {
   506		defer func() {
   507			if recover() != nil {
   508				res = token.Pos(p.file.Base() + p.file.Size()) // EOF position
   509			}
   510		}()
   511		_ = p.file.Offset(pos) // trigger a panic if position is out-of-range
   512		return pos
   513	}
   514	
   515	// ----------------------------------------------------------------------------
   516	// Identifiers
   517	
   518	func (p *parser) parseIdent() *ast.Ident {
   519		pos := p.pos
   520		name := "_"
   521		if p.tok == token.IDENT {
   522			name = p.lit
   523			p.next()
   524		} else {
   525			p.expect(token.IDENT) // use expect() error handling
   526		}
   527		return &ast.Ident{NamePos: pos, Name: name}
   528	}
   529	
   530	func (p *parser) parseIdentList() (list []*ast.Ident) {
   531		if p.trace {
   532			defer un(trace(p, "IdentList"))
   533		}
   534	
   535		list = append(list, p.parseIdent())
   536		for p.tok == token.COMMA {
   537			p.next()
   538			list = append(list, p.parseIdent())
   539		}
   540	
   541		return
   542	}
   543	
   544	// ----------------------------------------------------------------------------
   545	// Common productions
   546	
   547	// If lhs is set, result list elements which are identifiers are not resolved.
   548	func (p *parser) parseExprList(lhs bool) (list []ast.Expr) {
   549		if p.trace {
   550			defer un(trace(p, "ExpressionList"))
   551		}
   552	
   553		list = append(list, p.checkExpr(p.parseExpr(lhs)))
   554		for p.tok == token.COMMA {
   555			p.next()
   556			list = append(list, p.checkExpr(p.parseExpr(lhs)))
   557		}
   558	
   559		return
   560	}
   561	
   562	func (p *parser) parseLhsList() []ast.Expr {
   563		old := p.inRhs
   564		p.inRhs = false
   565		list := p.parseExprList(true)
   566		switch p.tok {
   567		case token.DEFINE:
   568			// lhs of a short variable declaration
   569			// but doesn't enter scope until later:
   570			// caller must call p.shortVarDecl(p.makeIdentList(list))
   571			// at appropriate time.
   572		case token.COLON:
   573			// lhs of a label declaration or a communication clause of a select
   574			// statement (parseLhsList is not called when parsing the case clause
   575			// of a switch statement):
   576			// - labels are declared by the caller of parseLhsList
   577			// - for communication clauses, if there is a stand-alone identifier
   578			//   followed by a colon, we have a syntax error; there is no need
   579			//   to resolve the identifier in that case
   580		default:
   581			// identifiers must be declared elsewhere
   582			for _, x := range list {
   583				p.resolve(x)
   584			}
   585		}
   586		p.inRhs = old
   587		return list
   588	}
   589	
   590	func (p *parser) parseRhsList() []ast.Expr {
   591		old := p.inRhs
   592		p.inRhs = true
   593		list := p.parseExprList(false)
   594		p.inRhs = old
   595		return list
   596	}
   597	
   598	// ----------------------------------------------------------------------------
   599	// Types
   600	
   601	func (p *parser) parseType() ast.Expr {
   602		if p.trace {
   603			defer un(trace(p, "Type"))
   604		}
   605	
   606		typ := p.tryType()
   607	
   608		if typ == nil {
   609			pos := p.pos
   610			p.errorExpected(pos, "type")
   611			p.next() // make progress
   612			return &ast.BadExpr{From: pos, To: p.pos}
   613		}
   614	
   615		return typ
   616	}
   617	
   618	// If the result is an identifier, it is not resolved.
   619	func (p *parser) parseTypeName() ast.Expr {
   620		if p.trace {
   621			defer un(trace(p, "TypeName"))
   622		}
   623	
   624		ident := p.parseIdent()
   625		// don't resolve ident yet - it may be a parameter or field name
   626	
   627		if p.tok == token.PERIOD {
   628			// ident is a package name
   629			p.next()
   630			p.resolve(ident)
   631			sel := p.parseIdent()
   632			return &ast.SelectorExpr{X: ident, Sel: sel}
   633		}
   634	
   635		return ident
   636	}
   637	
   638	func (p *parser) parseArrayType() ast.Expr {
   639		if p.trace {
   640			defer un(trace(p, "ArrayType"))
   641		}
   642	
   643		lbrack := p.expect(token.LBRACK)
   644		var len ast.Expr
   645		// always permit ellipsis for more fault-tolerant parsing
   646		if p.tok == token.ELLIPSIS {
   647			len = &ast.Ellipsis{Ellipsis: p.pos}
   648			p.next()
   649		} else if p.tok != token.RBRACK {
   650			len = p.parseRhs()
   651		}
   652		p.expect(token.RBRACK)
   653		elt := p.parseType()
   654	
   655		return &ast.ArrayType{Lbrack: lbrack, Len: len, Elt: elt}
   656	}
   657	
   658	func (p *parser) makeIdentList(list []ast.Expr) []*ast.Ident {
   659		idents := make([]*ast.Ident, len(list))
   660		for i, x := range list {
   661			ident, isIdent := x.(*ast.Ident)
   662			if !isIdent {
   663				if _, isBad := x.(*ast.BadExpr); !isBad {
   664					// only report error if it's a new one
   665					p.errorExpected(x.Pos(), "identifier")
   666				}
   667				ident = &ast.Ident{NamePos: x.Pos(), Name: "_"}
   668			}
   669			idents[i] = ident
   670		}
   671		return idents
   672	}
   673	
   674	func (p *parser) parseFieldDecl(scope *ast.Scope) *ast.Field {
   675		if p.trace {
   676			defer un(trace(p, "FieldDecl"))
   677		}
   678	
   679		doc := p.leadComment
   680	
   681		// FieldDecl
   682		list, typ := p.parseVarList(false)
   683	
   684		// Tag
   685		var tag *ast.BasicLit
   686		if p.tok == token.STRING {
   687			tag = &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
   688			p.next()
   689		}
   690	
   691		// analyze case
   692		var idents []*ast.Ident
   693		if typ != nil {
   694			// IdentifierList Type
   695			idents = p.makeIdentList(list)
   696		} else {
   697			// ["*"] TypeName (AnonymousField)
   698			typ = list[0] // we always have at least one element
   699			if n := len(list); n > 1 || !isTypeName(deref(typ)) {
   700				pos := typ.Pos()
   701				p.errorExpected(pos, "anonymous field")
   702				typ = &ast.BadExpr{From: pos, To: p.safePos(list[n-1].End())}
   703			}
   704		}
   705	
   706		p.expectSemi() // call before accessing p.linecomment
   707	
   708		field := &ast.Field{Doc: doc, Names: idents, Type: typ, Tag: tag, Comment: p.lineComment}
   709		p.declare(field, nil, scope, ast.Var, idents...)
   710		p.resolve(typ)
   711	
   712		return field
   713	}
   714	
   715	func (p *parser) parseStructType() *ast.StructType {
   716		if p.trace {
   717			defer un(trace(p, "StructType"))
   718		}
   719	
   720		pos := p.expect(token.STRUCT)
   721		lbrace := p.expect(token.LBRACE)
   722		scope := ast.NewScope(nil) // struct scope
   723		var list []*ast.Field
   724		for p.tok == token.IDENT || p.tok == token.MUL || p.tok == token.LPAREN {
   725			// a field declaration cannot start with a '(' but we accept
   726			// it here for more robust parsing and better error messages
   727			// (parseFieldDecl will check and complain if necessary)
   728			list = append(list, p.parseFieldDecl(scope))
   729		}
   730		rbrace := p.expect(token.RBRACE)
   731	
   732		return &ast.StructType{
   733			Struct: pos,
   734			Fields: &ast.FieldList{
   735				Opening: lbrace,
   736				List:    list,
   737				Closing: rbrace,
   738			},
   739		}
   740	}
   741	
   742	func (p *parser) parsePointerType() *ast.StarExpr {
   743		if p.trace {
   744			defer un(trace(p, "PointerType"))
   745		}
   746	
   747		star := p.expect(token.MUL)
   748		base := p.parseType()
   749	
   750		return &ast.StarExpr{Star: star, X: base}
   751	}
   752	
   753	// If the result is an identifier, it is not resolved.
   754	func (p *parser) tryVarType(isParam bool) ast.Expr {
   755		if isParam && p.tok == token.ELLIPSIS {
   756			pos := p.pos
   757			p.next()
   758			typ := p.tryIdentOrType() // don't use parseType so we can provide better error message
   759			if typ != nil {
   760				p.resolve(typ)
   761			} else {
   762				p.error(pos, "'...' parameter is missing type")
   763				typ = &ast.BadExpr{From: pos, To: p.pos}
   764			}
   765			return &ast.Ellipsis{Ellipsis: pos, Elt: typ}
   766		}
   767		return p.tryIdentOrType()
   768	}
   769	
   770	// If the result is an identifier, it is not resolved.
   771	func (p *parser) parseVarType(isParam bool) ast.Expr {
   772		typ := p.tryVarType(isParam)
   773		if typ == nil {
   774			pos := p.pos
   775			p.errorExpected(pos, "type")
   776			p.next() // make progress
   777			typ = &ast.BadExpr{From: pos, To: p.pos}
   778		}
   779		return typ
   780	}
   781	
   782	// If any of the results are identifiers, they are not resolved.
   783	func (p *parser) parseVarList(isParam bool) (list []ast.Expr, typ ast.Expr) {
   784		if p.trace {
   785			defer un(trace(p, "VarList"))
   786		}
   787	
   788		// a list of identifiers looks like a list of type names
   789		//
   790		// parse/tryVarType accepts any type (including parenthesized
   791		// ones) even though the syntax does not permit them here: we
   792		// accept them all for more robust parsing and complain later
   793		for typ := p.parseVarType(isParam); typ != nil; {
   794			list = append(list, typ)
   795			if p.tok != token.COMMA {
   796				break
   797			}
   798			p.next()
   799			typ = p.tryVarType(isParam) // maybe nil as in: func f(int,) {}
   800		}
   801	
   802		// if we had a list of identifiers, it must be followed by a type
   803		typ = p.tryVarType(isParam)
   804	
   805		return
   806	}
   807	
   808	func (p *parser) parseParameterList(scope *ast.Scope, ellipsisOk bool) (params []*ast.Field) {
   809		if p.trace {
   810			defer un(trace(p, "ParameterList"))
   811		}
   812	
   813		// ParameterDecl
   814		list, typ := p.parseVarList(ellipsisOk)
   815	
   816		// analyze case
   817		if typ != nil {
   818			// IdentifierList Type
   819			idents := p.makeIdentList(list)
   820			field := &ast.Field{Names: idents, Type: typ}
   821			params = append(params, field)
   822			// Go spec: The scope of an identifier denoting a function
   823			// parameter or result variable is the function body.
   824			p.declare(field, nil, scope, ast.Var, idents...)
   825			p.resolve(typ)
   826			if p.tok == token.COMMA {
   827				p.next()
   828			}
   829			for p.tok != token.RPAREN && p.tok != token.EOF {
   830				idents := p.parseIdentList()
   831				typ := p.parseVarType(ellipsisOk)
   832				field := &ast.Field{Names: idents, Type: typ}
   833				params = append(params, field)
   834				// Go spec: The scope of an identifier denoting a function
   835				// parameter or result variable is the function body.
   836				p.declare(field, nil, scope, ast.Var, idents...)
   837				p.resolve(typ)
   838				if !p.atComma("parameter list") {
   839					break
   840				}
   841				p.next()
   842			}
   843		} else {
   844			// Type { "," Type } (anonymous parameters)
   845			params = make([]*ast.Field, len(list))
   846			for i, typ := range list {
   847				p.resolve(typ)
   848				params[i] = &ast.Field{Type: typ}
   849			}
   850		}
   851	
   852		return
   853	}
   854	
   855	func (p *parser) parseParameters(scope *ast.Scope, ellipsisOk bool) *ast.FieldList {
   856		if p.trace {
   857			defer un(trace(p, "Parameters"))
   858		}
   859	
   860		var params []*ast.Field
   861		lparen := p.expect(token.LPAREN)
   862		if p.tok != token.RPAREN {
   863			params = p.parseParameterList(scope, ellipsisOk)
   864		}
   865		rparen := p.expect(token.RPAREN)
   866	
   867		return &ast.FieldList{Opening: lparen, List: params, Closing: rparen}
   868	}
   869	
   870	func (p *parser) parseResult(scope *ast.Scope) *ast.FieldList {
   871		if p.trace {
   872			defer un(trace(p, "Result"))
   873		}
   874	
   875		if p.tok == token.LPAREN {
   876			return p.parseParameters(scope, false)
   877		}
   878	
   879		typ := p.tryType()
   880		if typ != nil {
   881			list := make([]*ast.Field, 1)
   882			list[0] = &ast.Field{Type: typ}
   883			return &ast.FieldList{List: list}
   884		}
   885	
   886		return nil
   887	}
   888	
   889	func (p *parser) parseSignature(scope *ast.Scope) (params, results *ast.FieldList) {
   890		if p.trace {
   891			defer un(trace(p, "Signature"))
   892		}
   893	
   894		params = p.parseParameters(scope, true)
   895		results = p.parseResult(scope)
   896	
   897		return
   898	}
   899	
   900	func (p *parser) parseFuncType() (*ast.FuncType, *ast.Scope) {
   901		if p.trace {
   902			defer un(trace(p, "FuncType"))
   903		}
   904	
   905		pos := p.expect(token.FUNC)
   906		scope := ast.NewScope(p.topScope) // function scope
   907		params, results := p.parseSignature(scope)
   908	
   909		return &ast.FuncType{Func: pos, Params: params, Results: results}, scope
   910	}
   911	
   912	func (p *parser) parseMethodSpec(scope *ast.Scope) *ast.Field {
   913		if p.trace {
   914			defer un(trace(p, "MethodSpec"))
   915		}
   916	
   917		doc := p.leadComment
   918		var idents []*ast.Ident
   919		var typ ast.Expr
   920		x := p.parseTypeName()
   921		if ident, isIdent := x.(*ast.Ident); isIdent && p.tok == token.LPAREN {
   922			// method
   923			idents = []*ast.Ident{ident}
   924			scope := ast.NewScope(nil) // method scope
   925			params, results := p.parseSignature(scope)
   926			typ = &ast.FuncType{Func: token.NoPos, Params: params, Results: results}
   927		} else {
   928			// embedded interface
   929			typ = x
   930			p.resolve(typ)
   931		}
   932		p.expectSemi() // call before accessing p.linecomment
   933	
   934		spec := &ast.Field{Doc: doc, Names: idents, Type: typ, Comment: p.lineComment}
   935		p.declare(spec, nil, scope, ast.Fun, idents...)
   936	
   937		return spec
   938	}
   939	
   940	func (p *parser) parseInterfaceType() *ast.InterfaceType {
   941		if p.trace {
   942			defer un(trace(p, "InterfaceType"))
   943		}
   944	
   945		pos := p.expect(token.INTERFACE)
   946		lbrace := p.expect(token.LBRACE)
   947		scope := ast.NewScope(nil) // interface scope
   948		var list []*ast.Field
   949		for p.tok == token.IDENT {
   950			list = append(list, p.parseMethodSpec(scope))
   951		}
   952		rbrace := p.expect(token.RBRACE)
   953	
   954		return &ast.InterfaceType{
   955			Interface: pos,
   956			Methods: &ast.FieldList{
   957				Opening: lbrace,
   958				List:    list,
   959				Closing: rbrace,
   960			},
   961		}
   962	}
   963	
   964	func (p *parser) parseMapType() *ast.MapType {
   965		if p.trace {
   966			defer un(trace(p, "MapType"))
   967		}
   968	
   969		pos := p.expect(token.MAP)
   970		p.expect(token.LBRACK)
   971		key := p.parseType()
   972		p.expect(token.RBRACK)
   973		value := p.parseType()
   974	
   975		return &ast.MapType{Map: pos, Key: key, Value: value}
   976	}
   977	
   978	func (p *parser) parseChanType() *ast.ChanType {
   979		if p.trace {
   980			defer un(trace(p, "ChanType"))
   981		}
   982	
   983		pos := p.pos
   984		dir := ast.SEND | ast.RECV
   985		var arrow token.Pos
   986		if p.tok == token.CHAN {
   987			p.next()
   988			if p.tok == token.ARROW {
   989				arrow = p.pos
   990				p.next()
   991				dir = ast.SEND
   992			}
   993		} else {
   994			arrow = p.expect(token.ARROW)
   995			p.expect(token.CHAN)
   996			dir = ast.RECV
   997		}
   998		value := p.parseType()
   999	
  1000		return &ast.ChanType{Begin: pos, Arrow: arrow, Dir: dir, Value: value}
  1001	}
  1002	
  1003	// If the result is an identifier, it is not resolved.
  1004	func (p *parser) tryIdentOrType() ast.Expr {
  1005		switch p.tok {
  1006		case token.IDENT:
  1007			return p.parseTypeName()
  1008		case token.LBRACK:
  1009			return p.parseArrayType()
  1010		case token.STRUCT:
  1011			return p.parseStructType()
  1012		case token.MUL:
  1013			return p.parsePointerType()
  1014		case token.FUNC:
  1015			typ, _ := p.parseFuncType()
  1016			return typ
  1017		case token.INTERFACE:
  1018			return p.parseInterfaceType()
  1019		case token.MAP:
  1020			return p.parseMapType()
  1021		case token.CHAN, token.ARROW:
  1022			return p.parseChanType()
  1023		case token.LPAREN:
  1024			lparen := p.pos
  1025			p.next()
  1026			typ := p.parseType()
  1027			rparen := p.expect(token.RPAREN)
  1028			return &ast.ParenExpr{Lparen: lparen, X: typ, Rparen: rparen}
  1029		}
  1030	
  1031		// no type found
  1032		return nil
  1033	}
  1034	
  1035	func (p *parser) tryType() ast.Expr {
  1036		typ := p.tryIdentOrType()
  1037		if typ != nil {
  1038			p.resolve(typ)
  1039		}
  1040		return typ
  1041	}
  1042	
  1043	// ----------------------------------------------------------------------------
  1044	// Blocks
  1045	
  1046	func (p *parser) parseStmtList() (list []ast.Stmt) {
  1047		if p.trace {
  1048			defer un(trace(p, "StatementList"))
  1049		}
  1050	
  1051		for p.tok != token.CASE && p.tok != token.DEFAULT && p.tok != token.RBRACE && p.tok != token.EOF {
  1052			list = append(list, p.parseStmt())
  1053		}
  1054	
  1055		return
  1056	}
  1057	
  1058	func (p *parser) parseBody(scope *ast.Scope) *ast.BlockStmt {
  1059		if p.trace {
  1060			defer un(trace(p, "Body"))
  1061		}
  1062	
  1063		lbrace := p.expect(token.LBRACE)
  1064		p.topScope = scope // open function scope
  1065		p.openLabelScope()
  1066		list := p.parseStmtList()
  1067		p.closeLabelScope()
  1068		p.closeScope()
  1069		rbrace := p.expect(token.RBRACE)
  1070	
  1071		return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
  1072	}
  1073	
  1074	func (p *parser) parseBlockStmt() *ast.BlockStmt {
  1075		if p.trace {
  1076			defer un(trace(p, "BlockStmt"))
  1077		}
  1078	
  1079		lbrace := p.expect(token.LBRACE)
  1080		p.openScope()
  1081		list := p.parseStmtList()
  1082		p.closeScope()
  1083		rbrace := p.expect(token.RBRACE)
  1084	
  1085		return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
  1086	}
  1087	
  1088	// ----------------------------------------------------------------------------
  1089	// Expressions
  1090	
  1091	func (p *parser) parseFuncTypeOrLit() ast.Expr {
  1092		if p.trace {
  1093			defer un(trace(p, "FuncTypeOrLit"))
  1094		}
  1095	
  1096		typ, scope := p.parseFuncType()
  1097		if p.tok != token.LBRACE {
  1098			// function type only
  1099			return typ
  1100		}
  1101	
  1102		p.exprLev++
  1103		body := p.parseBody(scope)
  1104		p.exprLev--
  1105	
  1106		return &ast.FuncLit{Type: typ, Body: body}
  1107	}
  1108	
  1109	// parseOperand may return an expression or a raw type (incl. array
   1110	// types of the form [...]T). Callers must verify the result.
  1111	// If lhs is set and the result is an identifier, it is not resolved.
  1112	//
  1113	func (p *parser) parseOperand(lhs bool) ast.Expr {
  1114		if p.trace {
  1115			defer un(trace(p, "Operand"))
  1116		}
  1117	
  1118		switch p.tok {
  1119		case token.IDENT:
  1120			x := p.parseIdent()
  1121			if !lhs {
  1122				p.resolve(x)
  1123			}
  1124			return x
  1125	
  1126		case token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING:
  1127			x := &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
  1128			p.next()
  1129			return x
  1130	
  1131		case token.LPAREN:
  1132			lparen := p.pos
  1133			p.next()
  1134			p.exprLev++
  1135			x := p.parseRhsOrType() // types may be parenthesized: (some type)
  1136			p.exprLev--
  1137			rparen := p.expect(token.RPAREN)
  1138			return &ast.ParenExpr{Lparen: lparen, X: x, Rparen: rparen}
  1139	
  1140		case token.FUNC:
  1141			return p.parseFuncTypeOrLit()
  1142		}
  1143	
  1144		if typ := p.tryIdentOrType(); typ != nil {
  1145			// could be type for composite literal or conversion
  1146			_, isIdent := typ.(*ast.Ident)
  1147			assert(!isIdent, "type cannot be identifier")
  1148			return typ
  1149		}
  1150	
  1151		// we have an error
  1152		pos := p.pos
  1153		p.errorExpected(pos, "operand")
  1154		syncStmt(p)
  1155		return &ast.BadExpr{From: pos, To: p.pos}
  1156	}
  1157	
  1158	func (p *parser) parseSelector(x ast.Expr) ast.Expr {
  1159		if p.trace {
  1160			defer un(trace(p, "Selector"))
  1161		}
  1162	
  1163		sel := p.parseIdent()
  1164	
  1165		return &ast.SelectorExpr{X: x, Sel: sel}
  1166	}
  1167	
  1168	func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr {
  1169		if p.trace {
  1170			defer un(trace(p, "TypeAssertion"))
  1171		}
  1172	
  1173		lparen := p.expect(token.LPAREN)
  1174		var typ ast.Expr
  1175		if p.tok == token.TYPE {
  1176			// type switch: typ == nil
  1177			p.next()
  1178		} else {
  1179			typ = p.parseType()
  1180		}
  1181		rparen := p.expect(token.RPAREN)
  1182	
  1183		return &ast.TypeAssertExpr{X: x, Type: typ, Lparen: lparen, Rparen: rparen}
  1184	}
  1185	
  1186	func (p *parser) parseIndexOrSlice(x ast.Expr) ast.Expr {
  1187		if p.trace {
  1188			defer un(trace(p, "IndexOrSlice"))
  1189		}
  1190	
  1191		const N = 3 // change the 3 to 2 to disable 3-index slices
  1192		lbrack := p.expect(token.LBRACK)
  1193		p.exprLev++
  1194		var index [N]ast.Expr
  1195		var colons [N - 1]token.Pos
  1196		if p.tok != token.COLON {
  1197			index[0] = p.parseRhs()
  1198		}
  1199		ncolons := 0
  1200		for p.tok == token.COLON && ncolons < len(colons) {
  1201			colons[ncolons] = p.pos
  1202			ncolons++
  1203			p.next()
  1204			if p.tok != token.COLON && p.tok != token.RBRACK && p.tok != token.EOF {
  1205				index[ncolons] = p.parseRhs()
  1206			}
  1207		}
  1208		p.exprLev--
  1209		rbrack := p.expect(token.RBRACK)
  1210	
  1211		if ncolons > 0 {
  1212			// slice expression
  1213			slice3 := false
  1214			if ncolons == 2 {
  1215				slice3 = true
  1216				// Check presence of 2nd and 3rd index here rather than during type-checking
  1217				// to prevent erroneous programs from passing through gofmt (was issue 7305).
  1218				if index[1] == nil {
  1219					p.error(colons[0], "2nd index required in 3-index slice")
  1220					index[1] = &ast.BadExpr{From: colons[0] + 1, To: colons[1]}
  1221				}
  1222				if index[2] == nil {
  1223					p.error(colons[1], "3rd index required in 3-index slice")
  1224					index[2] = &ast.BadExpr{From: colons[1] + 1, To: rbrack}
  1225				}
  1226			}
  1227			return &ast.SliceExpr{X: x, Lbrack: lbrack, Low: index[0], High: index[1], Max: index[2], Slice3: slice3, Rbrack: rbrack}
  1228		}
  1229	
  1230		return &ast.IndexExpr{X: x, Lbrack: lbrack, Index: index[0], Rbrack: rbrack}
  1231	}
  1232	
  1233	func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr {
  1234		if p.trace {
  1235			defer un(trace(p, "CallOrConversion"))
  1236		}
  1237	
  1238		lparen := p.expect(token.LPAREN)
  1239		p.exprLev++
  1240		var list []ast.Expr
  1241		var ellipsis token.Pos
  1242		for p.tok != token.RPAREN && p.tok != token.EOF && !ellipsis.IsValid() {
  1243			list = append(list, p.parseRhsOrType()) // builtins may expect a type: make(some type, ...)
  1244			if p.tok == token.ELLIPSIS {
  1245				ellipsis = p.pos
  1246				p.next()
  1247			}
  1248			if !p.atComma("argument list") {
  1249				break
  1250			}
  1251			p.next()
  1252		}
  1253		p.exprLev--
  1254		rparen := p.expectClosing(token.RPAREN, "argument list")
  1255	
  1256		return &ast.CallExpr{Fun: fun, Lparen: lparen, Args: list, Ellipsis: ellipsis, Rparen: rparen}
  1257	}
  1258	
  1259	func (p *parser) parseElement(keyOk bool) ast.Expr {
  1260		if p.trace {
  1261			defer un(trace(p, "Element"))
  1262		}
  1263	
  1264		if p.tok == token.LBRACE {
  1265			return p.parseLiteralValue(nil)
  1266		}
  1267	
  1268		// Because the parser doesn't know the composite literal type, it cannot
  1269		// know if a key that's an identifier is a struct field name or a name
  1270		// denoting a value. The former is not resolved by the parser or the
  1271		// resolver.
  1272		//
  1273		// Instead, _try_ to resolve such a key if possible. If it resolves,
   1274	// it either a) has resolved correctly, or b) has resolved incorrectly because
  1275		// the key is a struct field with a name matching another identifier.
  1276		// In the former case we are done, and in the latter case we don't
  1277		// care because the type checker will do a separate field lookup.
  1278		//
  1279		// If the key does not resolve, it a) must be defined at the top
  1280		// level in another file of the same package, the universe scope, or be
  1281		// undeclared; or b) it is a struct field. In the former case, the type
  1282		// checker can do a top-level lookup, and in the latter case it will do
  1283		// a separate field lookup.
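	// For example (editor's note): in a composite literal such as
	//	Point{x: 1, y: 2}
	// the keys x and y may be field names or values in scope; the parser
	// cannot tell which, so it only tries to resolve them (below, via
	// tryResolve with collectUnresolved set to false) and leaves the
	// final decision to the type checker.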
  1284		x := p.checkExpr(p.parseExpr(keyOk))
  1285		if keyOk {
  1286			if p.tok == token.COLON {
  1287				colon := p.pos
  1288				p.next()
  1289				// Try to resolve the key but don't collect it
  1290				// as unresolved identifier if it fails so that
  1291				// we don't get (possibly false) errors about
  1292				// undeclared names.
  1293				p.tryResolve(x, false)
  1294				return &ast.KeyValueExpr{Key: x, Colon: colon, Value: p.parseElement(false)}
  1295			}
  1296			p.resolve(x) // not a key
  1297		}
  1298	
  1299		return x
  1300	}
  1301	
  1302	func (p *parser) parseElementList() (list []ast.Expr) {
  1303		if p.trace {
  1304			defer un(trace(p, "ElementList"))
  1305		}
  1306	
  1307		for p.tok != token.RBRACE && p.tok != token.EOF {
  1308			list = append(list, p.parseElement(true))
  1309			if !p.atComma("composite literal") {
  1310				break
  1311			}
  1312			p.next()
  1313		}
  1314	
  1315		return
  1316	}
  1317	
  1318	func (p *parser) parseLiteralValue(typ ast.Expr) ast.Expr {
  1319		if p.trace {
  1320			defer un(trace(p, "LiteralValue"))
  1321		}
  1322	
  1323		lbrace := p.expect(token.LBRACE)
  1324		var elts []ast.Expr
  1325		p.exprLev++
  1326		if p.tok != token.RBRACE {
  1327			elts = p.parseElementList()
  1328		}
  1329		p.exprLev--
  1330		rbrace := p.expectClosing(token.RBRACE, "composite literal")
  1331		return &ast.CompositeLit{Type: typ, Lbrace: lbrace, Elts: elts, Rbrace: rbrace}
  1332	}
  1333	
  1334	// checkExpr checks that x is an expression (and not a type).
  1335	func (p *parser) checkExpr(x ast.Expr) ast.Expr {
  1336		switch unparen(x).(type) {
  1337		case *ast.BadExpr:
  1338		case *ast.Ident:
  1339		case *ast.BasicLit:
  1340		case *ast.FuncLit:
  1341		case *ast.CompositeLit:
  1342		case *ast.ParenExpr:
  1343			panic("unreachable")
  1344		case *ast.SelectorExpr:
  1345		case *ast.IndexExpr:
  1346		case *ast.SliceExpr:
  1347		case *ast.TypeAssertExpr:
  1348			// If t.Type == nil we have a type assertion of the form
  1349			// y.(type), which is only allowed in type switch expressions.
  1350			// It's hard to exclude those but for the case where we are in
  1351			// a type switch. Instead be lenient and test this in the type
  1352			// checker.
  1353		case *ast.CallExpr:
  1354		case *ast.StarExpr:
  1355		case *ast.UnaryExpr:
  1356		case *ast.BinaryExpr:
  1357		default:
  1358			// all other nodes are not proper expressions
  1359			p.errorExpected(x.Pos(), "expression")
  1360			x = &ast.BadExpr{From: x.Pos(), To: p.safePos(x.End())}
  1361		}
  1362		return x
  1363	}
  1364	
  1365	// isTypeName returns true iff x is a (qualified) TypeName.
  1366	func isTypeName(x ast.Expr) bool {
  1367		switch t := x.(type) {
  1368		case *ast.BadExpr:
  1369		case *ast.Ident:
  1370		case *ast.SelectorExpr:
  1371			_, isIdent := t.X.(*ast.Ident)
  1372			return isIdent
  1373		default:
  1374			return false // all other nodes are not type names
  1375		}
  1376		return true
  1377	}
  1378	
  1379	// isLiteralType returns true iff x is a legal composite literal type.
  1380	func isLiteralType(x ast.Expr) bool {
  1381		switch t := x.(type) {
  1382		case *ast.BadExpr:
  1383		case *ast.Ident:
  1384		case *ast.SelectorExpr:
  1385			_, isIdent := t.X.(*ast.Ident)
  1386			return isIdent
  1387		case *ast.ArrayType:
  1388		case *ast.StructType:
  1389		case *ast.MapType:
  1390		default:
  1391			return false // all other nodes are not legal composite literal types
  1392		}
  1393		return true
  1394	}
  1395	
  1396	// If x is of the form *T, deref returns T, otherwise it returns x.
  1397	func deref(x ast.Expr) ast.Expr {
  1398		if p, isPtr := x.(*ast.StarExpr); isPtr {
  1399			x = p.X
  1400		}
  1401		return x
  1402	}
  1403	
  1404	// If x is of the form (T), unparen returns unparen(T), otherwise it returns x.
  1405	func unparen(x ast.Expr) ast.Expr {
  1406		if p, isParen := x.(*ast.ParenExpr); isParen {
  1407			x = unparen(p.X)
  1408		}
  1409		return x
  1410	}
  1411	
  1412	// checkExprOrType checks that x is an expression or a type
  1413	// (and not a raw type such as [...]T).
  1414	//
  1415	func (p *parser) checkExprOrType(x ast.Expr) ast.Expr {
  1416		switch t := unparen(x).(type) {
  1417		case *ast.ParenExpr:
  1418			panic("unreachable")
  1419		case *ast.UnaryExpr:
  1420		case *ast.ArrayType:
  1421			if len, isEllipsis := t.Len.(*ast.Ellipsis); isEllipsis {
  1422				p.error(len.Pos(), "expected array length, found '...'")
  1423				x = &ast.BadExpr{From: x.Pos(), To: p.safePos(x.End())}
  1424			}
  1425		}
  1426	
  1427		// all other nodes are expressions or types
  1428		return x
  1429	}
  1430	
  1431	// If lhs is set and the result is an identifier, it is not resolved.
  1432	func (p *parser) parsePrimaryExpr(lhs bool) ast.Expr {
  1433		if p.trace {
  1434			defer un(trace(p, "PrimaryExpr"))
  1435		}
  1436	
  1437		x := p.parseOperand(lhs)
  1438	L:
  1439		for {
  1440			switch p.tok {
  1441			case token.PERIOD:
  1442				p.next()
  1443				if lhs {
  1444					p.resolve(x)
  1445				}
  1446				switch p.tok {
  1447				case token.IDENT:
  1448					x = p.parseSelector(p.checkExprOrType(x))
  1449				case token.LPAREN:
  1450					x = p.parseTypeAssertion(p.checkExpr(x))
  1451				default:
  1452					pos := p.pos
  1453					p.errorExpected(pos, "selector or type assertion")
  1454					p.next() // make progress
  1455					x = &ast.BadExpr{From: pos, To: p.pos}
  1456				}
  1457			case token.LBRACK:
  1458				if lhs {
  1459					p.resolve(x)
  1460				}
  1461				x = p.parseIndexOrSlice(p.checkExpr(x))
  1462			case token.LPAREN:
  1463				if lhs {
  1464					p.resolve(x)
  1465				}
  1466				x = p.parseCallOrConversion(p.checkExprOrType(x))
  1467			case token.LBRACE:
  1468				if isLiteralType(x) && (p.exprLev >= 0 || !isTypeName(x)) {
  1469					if lhs {
  1470						p.resolve(x)
  1471					}
  1472					x = p.parseLiteralValue(x)
  1473				} else {
  1474					break L
  1475				}
  1476			default:
  1477				break L
  1478			}
  1479			lhs = false // no need to try to resolve again
  1480		}
  1481	
  1482		return x
  1483	}
  1484	
  1485	// If lhs is set and the result is an identifier, it is not resolved.
  1486	func (p *parser) parseUnaryExpr(lhs bool) ast.Expr {
  1487		if p.trace {
  1488			defer un(trace(p, "UnaryExpr"))
  1489		}
  1490	
  1491		switch p.tok {
  1492		case token.ADD, token.SUB, token.NOT, token.XOR, token.AND:
  1493			pos, op := p.pos, p.tok
  1494			p.next()
  1495			x := p.parseUnaryExpr(false)
  1496			return &ast.UnaryExpr{OpPos: pos, Op: op, X: p.checkExpr(x)}
  1497	
  1498		case token.ARROW:
  1499			// channel type or receive expression
  1500			arrow := p.pos
  1501			p.next()
  1502	
  1503			// If the next token is token.CHAN we still don't know if it
  1504			// is a channel type or a receive operation - we only know
  1505			// once we have found the end of the unary expression. There
  1506			// are two cases:
  1507			//
  1508			//   <- type  => (<-type) must be channel type
  1509			//   <- expr  => <-(expr) is a receive from an expression
  1510			//
  1511			// In the first case, the arrow must be re-associated with
  1512			// the channel type parsed already:
  1513			//
  1514			//   <- (chan type)    =>  (<-chan type)
  1515			//   <- (chan<- type)  =>  (<-chan (<-type))
  1516	
  1517			x := p.parseUnaryExpr(false)
  1518	
  1519			// determine which case we have
  1520			if typ, ok := x.(*ast.ChanType); ok {
  1521				// (<-type)
  1522	
  1523				// re-associate position info and <-
  1524				dir := ast.SEND
  1525				for ok && dir == ast.SEND {
  1526					if typ.Dir == ast.RECV {
  1527						// error: (<-type) is (<-(<-chan T))
  1528						p.errorExpected(typ.Arrow, "'chan'")
  1529					}
  1530					arrow, typ.Begin, typ.Arrow = typ.Arrow, arrow, arrow
  1531					dir, typ.Dir = typ.Dir, ast.RECV
  1532					typ, ok = typ.Value.(*ast.ChanType)
  1533				}
  1534				if dir == ast.SEND {
  1535					p.errorExpected(arrow, "channel type")
  1536				}
  1537	
  1538				return x
  1539			}
  1540	
  1541			// <-(expr)
  1542			return &ast.UnaryExpr{OpPos: arrow, Op: token.ARROW, X: p.checkExpr(x)}
  1543	
  1544		case token.MUL:
  1545			// pointer type or unary "*" expression
  1546			pos := p.pos
  1547			p.next()
  1548			x := p.parseUnaryExpr(false)
  1549			return &ast.StarExpr{Star: pos, X: p.checkExprOrType(x)}
  1550		}
  1551	
  1552		return p.parsePrimaryExpr(lhs)
  1553	}
  1554	
  1555	func (p *parser) tokPrec() (token.Token, int) {
  1556		tok := p.tok
  1557		if p.inRhs && tok == token.ASSIGN {
  1558			tok = token.EQL
  1559		}
  1560		return tok, tok.Precedence()
  1561	}
  1562	
  1563	// If lhs is set and the result is an identifier, it is not resolved.
  1564	func (p *parser) parseBinaryExpr(lhs bool, prec1 int) ast.Expr {
  1565		if p.trace {
  1566			defer un(trace(p, "BinaryExpr"))
  1567		}
  1568	
  1569		x := p.parseUnaryExpr(lhs)
  1570		for _, prec := p.tokPrec(); prec >= prec1; prec-- {
  1571			for {
  1572				op, oprec := p.tokPrec()
  1573				if oprec != prec {
  1574					break
  1575				}
  1576				pos := p.expect(op)
  1577				if lhs {
  1578					p.resolve(x)
  1579					lhs = false
  1580				}
  1581				y := p.parseBinaryExpr(false, prec+1)
  1582				x = &ast.BinaryExpr{X: p.checkExpr(x), OpPos: pos, Op: op, Y: p.checkExpr(y)}
  1583			}
  1584		}
  1585	
  1586		return x
  1587	}
  1588	
  1589	// If lhs is set and the result is an identifier, it is not resolved.
  1590	// The result may be a type or even a raw type ([...]int). Callers must
  1591	// check the result (using checkExpr or checkExprOrType), depending on
  1592	// context.
  1593	func (p *parser) parseExpr(lhs bool) ast.Expr {
  1594		if p.trace {
  1595			defer un(trace(p, "Expression"))
  1596		}
  1597	
  1598		return p.parseBinaryExpr(lhs, token.LowestPrec+1)
  1599	}
  1600	
  1601	func (p *parser) parseRhs() ast.Expr {
  1602		old := p.inRhs
  1603		p.inRhs = true
  1604		x := p.checkExpr(p.parseExpr(false))
  1605		p.inRhs = old
  1606		return x
  1607	}
  1608	
  1609	func (p *parser) parseRhsOrType() ast.Expr {
  1610		old := p.inRhs
  1611		p.inRhs = true
  1612		x := p.checkExprOrType(p.parseExpr(false))
  1613		p.inRhs = old
  1614		return x
  1615	}
  1616	
  1617	// ----------------------------------------------------------------------------
  1618	// Statements
  1619	
  1620	// Parsing modes for parseSimpleStmt.
  1621	const (
  1622		basic = iota
  1623		labelOk
  1624		rangeOk
  1625	)
  1626	
  1627	// parseSimpleStmt returns true as 2nd result if it parsed the assignment
  1628	// of a range clause (with mode == rangeOk). The returned statement is an
  1629	// assignment with a right-hand side that is a single unary expression of
  1630	// the form "range x". No guarantees are given for the left-hand side.
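// For example (editor's note), when parsing the header of
//
//	for i, x := range s { ... }
//
// parseSimpleStmt (called with mode == rangeOk) returns an *ast.AssignStmt
// whose Rhs is the single unary expression "range s" with Op token.RANGE,
// together with true as the 2nd result.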
  1631	func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) {
  1632		if p.trace {
  1633			defer un(trace(p, "SimpleStmt"))
  1634		}
  1635	
  1636		x := p.parseLhsList()
  1637	
  1638		switch p.tok {
  1639		case
  1640			token.DEFINE, token.ASSIGN, token.ADD_ASSIGN,
  1641			token.SUB_ASSIGN, token.MUL_ASSIGN, token.QUO_ASSIGN,
  1642			token.REM_ASSIGN, token.AND_ASSIGN, token.OR_ASSIGN,
  1643			token.XOR_ASSIGN, token.SHL_ASSIGN, token.SHR_ASSIGN, token.AND_NOT_ASSIGN:
  1644			// assignment statement, possibly part of a range clause
  1645			pos, tok := p.pos, p.tok
  1646			p.next()
  1647			var y []ast.Expr
  1648			isRange := false
  1649			if mode == rangeOk && p.tok == token.RANGE && (tok == token.DEFINE || tok == token.ASSIGN) {
  1650				pos := p.pos
  1651				p.next()
  1652				y = []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
  1653				isRange = true
  1654			} else {
  1655				y = p.parseRhsList()
  1656			}
  1657			as := &ast.AssignStmt{Lhs: x, TokPos: pos, Tok: tok, Rhs: y}
  1658			if tok == token.DEFINE {
  1659				p.shortVarDecl(as, x)
  1660			}
  1661			return as, isRange
  1662		}
  1663	
  1664		if len(x) > 1 {
  1665			p.errorExpected(x[0].Pos(), "1 expression")
  1666			// continue with first expression
  1667		}
  1668	
  1669		switch p.tok {
  1670		case token.COLON:
  1671			// labeled statement
  1672			colon := p.pos
  1673			p.next()
  1674			if label, isIdent := x[0].(*ast.Ident); mode == labelOk && isIdent {
  1675				// Go spec: The scope of a label is the body of the function
  1676				// in which it is declared and excludes the body of any nested
  1677				// function.
  1678				stmt := &ast.LabeledStmt{Label: label, Colon: colon, Stmt: p.parseStmt()}
  1679				p.declare(stmt, nil, p.labelScope, ast.Lbl, label)
  1680				return stmt, false
  1681			}
  1682			// The label declaration typically starts at x[0].Pos(), but the label
  1683			// declaration may be erroneous due to a token after that position (and
  1684			// before the ':'). If SpuriousErrors is not set, the (only) error re-
  1685			// ported for the line is the illegal label error instead of the token
  1686			// before the ':' that caused the problem. Thus, use the (latest) colon
  1687			// position for error reporting.
  1688			p.error(colon, "illegal label declaration")
  1689			return &ast.BadStmt{From: x[0].Pos(), To: colon + 1}, false
  1690	
  1691		case token.ARROW:
  1692			// send statement
  1693			arrow := p.pos
  1694			p.next()
  1695			y := p.parseRhs()
  1696			return &ast.SendStmt{Chan: x[0], Arrow: arrow, Value: y}, false
  1697	
  1698		case token.INC, token.DEC:
  1699			// increment or decrement
  1700			s := &ast.IncDecStmt{X: x[0], TokPos: p.pos, Tok: p.tok}
  1701			p.next()
  1702			return s, false
  1703		}
  1704	
  1705		// expression
  1706		return &ast.ExprStmt{X: x[0]}, false
  1707	}
  1708	
  1709	func (p *parser) parseCallExpr(callType string) *ast.CallExpr {
  1710		x := p.parseRhsOrType() // could be a conversion: (some type)(x)
  1711		if call, isCall := x.(*ast.CallExpr); isCall {
  1712			return call
  1713		}
  1714		if _, isBad := x.(*ast.BadExpr); !isBad {
  1715			// only report error if it's a new one
  1716			p.error(p.safePos(x.End()), fmt.Sprintf("function must be invoked in %s statement", callType))
  1717		}
  1718		return nil
  1719	}
  1720	
  1721	func (p *parser) parseGoStmt() ast.Stmt {
  1722		if p.trace {
  1723			defer un(trace(p, "GoStmt"))
  1724		}
  1725	
  1726		pos := p.expect(token.GO)
  1727		call := p.parseCallExpr("go")
  1728		p.expectSemi()
  1729		if call == nil {
  1730			return &ast.BadStmt{From: pos, To: pos + 2} // len("go")
  1731		}
  1732	
  1733		return &ast.GoStmt{Go: pos, Call: call}
  1734	}
  1735	
  1736	func (p *parser) parseDeferStmt() ast.Stmt {
  1737		if p.trace {
  1738			defer un(trace(p, "DeferStmt"))
  1739		}
  1740	
  1741		pos := p.expect(token.DEFER)
  1742		call := p.parseCallExpr("defer")
  1743		p.expectSemi()
  1744		if call == nil {
  1745			return &ast.BadStmt{From: pos, To: pos + 5} // len("defer")
  1746		}
  1747	
  1748		return &ast.DeferStmt{Defer: pos, Call: call}
  1749	}
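
// Illustration (editorial note, not part of the original file): "go f(x)"
// and "defer f(x)" yield an *ast.GoStmt and an *ast.DeferStmt respectively.
// If the operand is not a call expression (for example "defer x"),
// parseCallExpr reports "function must be invoked in defer statement" and
// the result is an *ast.BadStmt spanning just the keyword.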
  1750	
  1751	func (p *parser) parseReturnStmt() *ast.ReturnStmt {
  1752		if p.trace {
  1753			defer un(trace(p, "ReturnStmt"))
  1754		}
  1755	
  1756		pos := p.pos
  1757		p.expect(token.RETURN)
  1758		var x []ast.Expr
  1759		if p.tok != token.SEMICOLON && p.tok != token.RBRACE {
  1760			x = p.parseRhsList()
  1761		}
  1762		p.expectSemi()
  1763	
  1764		return &ast.ReturnStmt{Return: pos, Results: x}
  1765	}
  1766	
  1767	func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt {
  1768		if p.trace {
  1769			defer un(trace(p, "BranchStmt"))
  1770		}
  1771	
  1772		pos := p.expect(tok)
  1773		var label *ast.Ident
  1774		if tok != token.FALLTHROUGH && p.tok == token.IDENT {
  1775			label = p.parseIdent()
  1776			// add to list of unresolved targets
  1777			n := len(p.targetStack) - 1
  1778			p.targetStack[n] = append(p.targetStack[n], label)
  1779		}
  1780		p.expectSemi()
  1781	
  1782		return &ast.BranchStmt{TokPos: pos, Tok: tok, Label: label}
  1783	}
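
// Illustration (editorial note, not part of the original file): for
// "break Loop", "continue Loop", or "goto done" the label identifier is
// appended to the innermost entry of targetStack as an unresolved target,
// to be matched against declared labels when the enclosing label scope is
// closed. "fallthrough" never takes a label, which is why the identifier
// check is skipped for token.FALLTHROUGH.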
  1784	
  1785	func (p *parser) makeExpr(s ast.Stmt, kind string) ast.Expr {
  1786		if s == nil {
  1787			return nil
  1788		}
  1789		if es, isExpr := s.(*ast.ExprStmt); isExpr {
  1790			return p.checkExpr(es.X)
  1791		}
  1792		p.error(s.Pos(), fmt.Sprintf("expected %s, found simple statement (missing parentheses around composite literal?)", kind))
  1793		return &ast.BadExpr{From: s.Pos(), To: p.safePos(s.End())}
  1794	}
  1795	
  1796	func (p *parser) parseIfStmt() *ast.IfStmt {
  1797		if p.trace {
  1798			defer un(trace(p, "IfStmt"))
  1799		}
  1800	
  1801		pos := p.expect(token.IF)
  1802		p.openScope()
  1803		defer p.closeScope()
  1804	
  1805		var s ast.Stmt
  1806		var x ast.Expr
  1807		{
  1808			prevLev := p.exprLev
  1809			p.exprLev = -1
  1810			if p.tok == token.SEMICOLON {
  1811				p.next()
  1812				x = p.parseRhs()
  1813			} else {
  1814				s, _ = p.parseSimpleStmt(basic)
  1815				if p.tok == token.SEMICOLON {
  1816					p.next()
  1817					x = p.parseRhs()
  1818				} else {
  1819					x = p.makeExpr(s, "boolean expression")
  1820					s = nil
  1821				}
  1822			}
  1823			p.exprLev = prevLev
  1824		}
  1825	
  1826		body := p.parseBlockStmt()
  1827		var else_ ast.Stmt
  1828		if p.tok == token.ELSE {
  1829			p.next()
  1830			else_ = p.parseStmt()
  1831		} else {
  1832			p.expectSemi()
  1833		}
  1834	
  1835		return &ast.IfStmt{If: pos, Init: s, Cond: x, Body: body, Else: else_}
  1836	}
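
// Illustration (editorial note, not part of the original file): for
// "if x := f(); x > 0 { ... }" the result carries the assignment as Init and
// "x > 0" as Cond; for "if x > 0 { ... }" Init is nil and the single simple
// statement is converted into the condition by makeExpr. Setting exprLev to
// -1 marks the control clause, so a "{" after the condition is read as the
// start of the block; if the clause turns out to be a non-expression simple
// statement, makeExpr reports the "missing parentheses around composite
// literal?" hint seen above.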
  1837	
  1838	func (p *parser) parseTypeList() (list []ast.Expr) {
  1839		if p.trace {
  1840			defer un(trace(p, "TypeList"))
  1841		}
  1842	
  1843		list = append(list, p.parseType())
  1844		for p.tok == token.COMMA {
  1845			p.next()
  1846			list = append(list, p.parseType())
  1847		}
  1848	
  1849		return
  1850	}
  1851	
  1852	func (p *parser) parseCaseClause(typeSwitch bool) *ast.CaseClause {
  1853		if p.trace {
  1854			defer un(trace(p, "CaseClause"))
  1855		}
  1856	
  1857		pos := p.pos
  1858		var list []ast.Expr
  1859		if p.tok == token.CASE {
  1860			p.next()
  1861			if typeSwitch {
  1862				list = p.parseTypeList()
  1863			} else {
  1864				list = p.parseRhsList()
  1865			}
  1866		} else {
  1867			p.expect(token.DEFAULT)
  1868		}
  1869	
  1870		colon := p.expect(token.COLON)
  1871		p.openScope()
  1872		body := p.parseStmtList()
  1873		p.closeScope()
  1874	
  1875		return &ast.CaseClause{Case: pos, List: list, Colon: colon, Body: body}
  1876	}
  1877	
  1878	func isTypeSwitchAssert(x ast.Expr) bool {
  1879		a, ok := x.(*ast.TypeAssertExpr)
  1880		return ok && a.Type == nil
  1881	}
  1882	
  1883	func isTypeSwitchGuard(s ast.Stmt) bool {
  1884		switch t := s.(type) {
  1885		case *ast.ExprStmt:
  1886		// x.(type)
  1887			return isTypeSwitchAssert(t.X)
  1888		case *ast.AssignStmt:
  1889		// v := x.(type)
  1890			return len(t.Lhs) == 1 && t.Tok == token.DEFINE && len(t.Rhs) == 1 && isTypeSwitchAssert(t.Rhs[0])
  1891		}
  1892		return false
  1893	}
  1894	
  1895	func (p *parser) parseSwitchStmt() ast.Stmt {
  1896		if p.trace {
  1897			defer un(trace(p, "SwitchStmt"))
  1898		}
  1899	
  1900		pos := p.expect(token.SWITCH)
  1901		p.openScope()
  1902		defer p.closeScope()
  1903	
  1904		var s1, s2 ast.Stmt
  1905		if p.tok != token.LBRACE {
  1906			prevLev := p.exprLev
  1907			p.exprLev = -1
  1908			if p.tok != token.SEMICOLON {
  1909				s2, _ = p.parseSimpleStmt(basic)
  1910			}
  1911			if p.tok == token.SEMICOLON {
  1912				p.next()
  1913				s1 = s2
  1914				s2 = nil
  1915				if p.tok != token.LBRACE {
  1916					// A TypeSwitchGuard may declare a variable in addition
  1917					// to the variable declared in the initial SimpleStmt.
  1918					// Introduce extra scope to avoid redeclaration errors:
  1919					//
  1920					//	switch t := 0; t := x.(T) { ... }
  1921					//
  1922					// (this code is not valid Go because the first t
  1923					// cannot be accessed and thus is never used, the extra
  1924					// scope is needed for the correct error message).
  1925					//
  1926					// If we don't have a type switch, s2 must be an expression.
  1927					// Having the extra nested but empty scope won't affect it.
  1928					p.openScope()
  1929					defer p.closeScope()
  1930					s2, _ = p.parseSimpleStmt(basic)
  1931				}
  1932			}
  1933			p.exprLev = prevLev
  1934		}
  1935	
  1936		typeSwitch := isTypeSwitchGuard(s2)
  1937		lbrace := p.expect(token.LBRACE)
  1938		var list []ast.Stmt
  1939		for p.tok == token.CASE || p.tok == token.DEFAULT {
  1940			list = append(list, p.parseCaseClause(typeSwitch))
  1941		}
  1942		rbrace := p.expect(token.RBRACE)
  1943		p.expectSemi()
  1944		body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
  1945	
  1946		if typeSwitch {
  1947			return &ast.TypeSwitchStmt{Switch: pos, Init: s1, Assign: s2, Body: body}
  1948		}
  1949	
  1950		return &ast.SwitchStmt{Switch: pos, Init: s1, Tag: p.makeExpr(s2, "switch expression"), Body: body}
  1951	}
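
// Illustration (editorial note, not part of the original file): in
// "switch v := x.(type) { ... }" the guard parses as an *ast.AssignStmt
// whose single right-hand side is an *ast.TypeAssertExpr with Type == nil,
// so isTypeSwitchGuard reports true and the result is an
// *ast.TypeSwitchStmt. In "switch x := f(); x { ... }" the guard is an
// ordinary expression: s1 holds the init statement and the result is an
// *ast.SwitchStmt with Tag set to x.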
  1952	
  1953	func (p *parser) parseCommClause() *ast.CommClause {
  1954		if p.trace {
  1955			defer un(trace(p, "CommClause"))
  1956		}
  1957	
  1958		p.openScope()
  1959		pos := p.pos
  1960		var comm ast.Stmt
  1961		if p.tok == token.CASE {
  1962			p.next()
  1963			lhs := p.parseLhsList()
  1964			if p.tok == token.ARROW {
  1965				// SendStmt
  1966				if len(lhs) > 1 {
  1967					p.errorExpected(lhs[0].Pos(), "1 expression")
  1968					// continue with first expression
  1969				}
  1970				arrow := p.pos
  1971				p.next()
  1972				rhs := p.parseRhs()
  1973				comm = &ast.SendStmt{Chan: lhs[0], Arrow: arrow, Value: rhs}
  1974			} else {
  1975				// RecvStmt
  1976				if tok := p.tok; tok == token.ASSIGN || tok == token.DEFINE {
  1977					// RecvStmt with assignment
  1978					if len(lhs) > 2 {
  1979						p.errorExpected(lhs[0].Pos(), "1 or 2 expressions")
  1980						// continue with first two expressions
  1981						lhs = lhs[0:2]
  1982					}
  1983					pos := p.pos
  1984					p.next()
  1985					rhs := p.parseRhs()
  1986					as := &ast.AssignStmt{Lhs: lhs, TokPos: pos, Tok: tok, Rhs: []ast.Expr{rhs}}
  1987					if tok == token.DEFINE {
  1988						p.shortVarDecl(as, lhs)
  1989					}
  1990					comm = as
  1991				} else {
  1992					// lhs must be single receive operation
  1993					if len(lhs) > 1 {
  1994						p.errorExpected(lhs[0].Pos(), "1 expression")
  1995						// continue with first expression
  1996					}
  1997					comm = &ast.ExprStmt{X: lhs[0]}
  1998				}
  1999			}
  2000		} else {
  2001			p.expect(token.DEFAULT)
  2002		}
  2003	
  2004		colon := p.expect(token.COLON)
  2005		body := p.parseStmtList()
  2006		p.closeScope()
  2007	
  2008		return &ast.CommClause{Case: pos, Comm: comm, Colon: colon, Body: body}
  2009	}
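
// Illustration (editorial note, not part of the original file): within a
// select statement, "case v, ok := <-ch:" produces a comm that is an
// *ast.AssignStmt with two left-hand sides and a single receive expression
// on the right, "case ch <- v:" produces an *ast.SendStmt, and a bare
// "case <-ch:" produces an *ast.ExprStmt wrapping the receive operation.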
  2010	
  2011	func (p *parser) parseSelectStmt() *ast.SelectStmt {
  2012		if p.trace {
  2013			defer un(trace(p, "SelectStmt"))
  2014		}
  2015	
  2016		pos := p.expect(token.SELECT)
  2017		lbrace := p.expect(token.LBRACE)
  2018		var list []ast.Stmt
  2019		for p.tok == token.CASE || p.tok == token.DEFAULT {
  2020			list = append(list, p.parseCommClause())
  2021		}
  2022		rbrace := p.expect(token.RBRACE)
  2023		p.expectSemi()
  2024		body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
  2025	
  2026		return &ast.SelectStmt{Select: pos, Body: body}
  2027	}
  2028	
  2029	func (p *parser) parseForStmt() ast.Stmt {
  2030		if p.trace {
  2031			defer un(trace(p, "ForStmt"))
  2032		}
  2033	
  2034		pos := p.expect(token.FOR)
  2035		p.openScope()
  2036		defer p.closeScope()
  2037	
  2038		var s1, s2, s3 ast.Stmt
  2039		var isRange bool
  2040		if p.tok != token.LBRACE {
  2041			prevLev := p.exprLev
  2042			p.exprLev = -1
  2043			if p.tok != token.SEMICOLON {
  2044				s2, isRange = p.parseSimpleStmt(rangeOk)
  2045			}
  2046			if !isRange && p.tok == token.SEMICOLON {
  2047				p.next()
  2048				s1 = s2
  2049				s2 = nil
  2050				if p.tok != token.SEMICOLON {
  2051					s2, _ = p.parseSimpleStmt(basic)
  2052				}
  2053				p.expectSemi()
  2054				if p.tok != token.LBRACE {
  2055					s3, _ = p.parseSimpleStmt(basic)
  2056				}
  2057			}
  2058			p.exprLev = prevLev
  2059		}
  2060	
  2061		body := p.parseBlockStmt()
  2062		p.expectSemi()
  2063	
  2064		if isRange {
  2065			as := s2.(*ast.AssignStmt)
  2066			// check lhs
  2067			var key, value ast.Expr
  2068			switch len(as.Lhs) {
  2069			case 2:
  2070				key, value = as.Lhs[0], as.Lhs[1]
  2071			case 1:
  2072				key = as.Lhs[0]
  2073			default:
  2074				p.errorExpected(as.Lhs[0].Pos(), "1 or 2 expressions")
  2075				return &ast.BadStmt{From: pos, To: p.safePos(body.End())}
  2076			}
  2077			// parseSimpleStmt returned a right-hand side that
  2078			// is a single unary expression of the form "range x"
  2079			x := as.Rhs[0].(*ast.UnaryExpr).X
  2080			return &ast.RangeStmt{
  2081				For:    pos,
  2082				Key:    key,
  2083				Value:  value,
  2084				TokPos: as.TokPos,
  2085				Tok:    as.Tok,
  2086				X:      x,
  2087				Body:   body,
  2088			}
  2089		}
  2090	
  2091		// regular for statement
  2092		return &ast.ForStmt{
  2093			For:  pos,
  2094			Init: s1,
  2095			Cond: p.makeExpr(s2, "boolean or range expression"),
  2096			Post: s3,
  2097			Body: body,
  2098		}
  2099	}
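
// Illustration (editorial note, not part of the original file):
// "for k, v := range m { ... }" comes back from parseSimpleStmt as an
// assignment whose Rhs[0] is the synthetic "range m" unary expression and
// is rewritten here into an *ast.RangeStmt with Key == k, Value == v, and
// X == m. "for i := 0; i < n; i++ { ... }" yields an *ast.ForStmt with
// Init, Cond, and Post set, while a bare "for { ... }" leaves all three nil.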
  2100	
  2101	func (p *parser) parseStmt() (s ast.Stmt) {
  2102		if p.trace {
  2103			defer un(trace(p, "Statement"))
  2104		}
  2105	
  2106		switch p.tok {
  2107		case token.CONST, token.TYPE, token.VAR:
  2108			s = &ast.DeclStmt{Decl: p.parseDecl(syncStmt)}
  2109		case
  2110			// tokens that may start an expression
  2111			token.IDENT, token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING, token.FUNC, token.LPAREN, // operands
  2112			token.LBRACK, token.STRUCT, // composite types
  2113			token.ADD, token.SUB, token.MUL, token.AND, token.XOR, token.ARROW, token.NOT: // unary operators
  2114			s, _ = p.parseSimpleStmt(labelOk)
  2115			// because of the required look-ahead, labeled statements are
  2116			// parsed by parseSimpleStmt - don't expect a semicolon after
  2117			// them
  2118			if _, isLabeledStmt := s.(*ast.LabeledStmt); !isLabeledStmt {
  2119				p.expectSemi()
  2120			}
  2121		case token.GO:
  2122			s = p.parseGoStmt()
  2123		case token.DEFER:
  2124			s = p.parseDeferStmt()
  2125		case token.RETURN:
  2126			s = p.parseReturnStmt()
  2127		case token.BREAK, token.CONTINUE, token.GOTO, token.FALLTHROUGH:
  2128			s = p.parseBranchStmt(p.tok)
  2129		case token.LBRACE:
  2130			s = p.parseBlockStmt()
  2131			p.expectSemi()
  2132		case token.IF:
  2133			s = p.parseIfStmt()
  2134		case token.SWITCH:
  2135			s = p.parseSwitchStmt()
  2136		case token.SELECT:
  2137			s = p.parseSelectStmt()
  2138		case token.FOR:
  2139			s = p.parseForStmt()
  2140		case token.SEMICOLON:
  2141			s = &ast.EmptyStmt{Semicolon: p.pos}
  2142			p.next()
  2143		case token.RBRACE:
  2144			// a semicolon may be omitted before a closing "}"
  2145			s = &ast.EmptyStmt{Semicolon: p.pos}
  2146		default:
  2147			// no statement found
  2148			pos := p.pos
  2149			p.errorExpected(pos, "statement")
  2150			syncStmt(p)
  2151			s = &ast.BadStmt{From: pos, To: p.pos}
  2152		}
  2153	
  2154		return
  2155	}
  2156	
  2157	// ----------------------------------------------------------------------------
  2158	// Declarations
  2159	
  2160	type parseSpecFunction func(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec
  2161	
  2162	func isValidImport(lit string) bool {
  2163		const illegalChars = `!"#$%&'()*,:;<=>?[\]^{|}` + "`\uFFFD"
  2164		s, _ := strconv.Unquote(lit) // go/scanner returns a legal string literal
  2165		for _, r := range s {
  2166			if !unicode.IsGraphic(r) || unicode.IsSpace(r) || strings.ContainsRune(illegalChars, r) {
  2167				return false
  2168			}
  2169		}
  2170		return s != ""
  2171	}
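
// Illustration (editorial note, not part of the original file): a path such
// as "fmt" or "math/rand" is accepted by isValidImport, while an empty path,
// a path containing a space ("a b"), or one containing a character from
// illegalChars ("foo!") is rejected.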
  2172	
  2173	func (p *parser) parseImportSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
  2174		if p.trace {
  2175			defer un(trace(p, "ImportSpec"))
  2176		}
  2177	
  2178		var ident *ast.Ident
  2179		switch p.tok {
  2180		case token.PERIOD:
  2181			ident = &ast.Ident{NamePos: p.pos, Name: "."}
  2182			p.next()
  2183		case token.IDENT:
  2184			ident = p.parseIdent()
  2185		}
  2186	
  2187		pos := p.pos
  2188		var path string
  2189		if p.tok == token.STRING {
  2190			path = p.lit
  2191			if !isValidImport(path) {
  2192				p.error(pos, "invalid import path: "+path)
  2193			}
  2194			p.next()
  2195		} else {
  2196			p.expect(token.STRING) // use expect() error handling
  2197		}
  2198	p.expectSemi() // call before accessing p.lineComment
  2199	
  2200		// collect imports
  2201		spec := &ast.ImportSpec{
  2202			Doc:     doc,
  2203			Name:    ident,
  2204			Path:    &ast.BasicLit{ValuePos: pos, Kind: token.STRING, Value: path},
  2205			Comment: p.lineComment,
  2206		}
  2207		p.imports = append(p.imports, spec)
  2208	
  2209		return spec
  2210	}
  2211	
  2212	func (p *parser) parseValueSpec(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec {
  2213		if p.trace {
  2214			defer un(trace(p, keyword.String()+"Spec"))
  2215		}
  2216	
  2217		idents := p.parseIdentList()
  2218		typ := p.tryType()
  2219		var values []ast.Expr
  2220		// always permit optional initialization for more tolerant parsing
  2221		if p.tok == token.ASSIGN {
  2222			p.next()
  2223			values = p.parseRhsList()
  2224		}
  2225	p.expectSemi() // call before accessing p.lineComment
  2226	
  2227		// Go spec: The scope of a constant or variable identifier declared inside
  2228		// a function begins at the end of the ConstSpec or VarSpec and ends at
  2229		// the end of the innermost containing block.
  2230		// (Global identifiers are resolved in a separate phase after parsing.)
  2231		spec := &ast.ValueSpec{
  2232			Doc:     doc,
  2233			Names:   idents,
  2234			Type:    typ,
  2235			Values:  values,
  2236			Comment: p.lineComment,
  2237		}
  2238		kind := ast.Con
  2239		if keyword == token.VAR {
  2240			kind = ast.Var
  2241		}
  2242		p.declare(spec, iota, p.topScope, kind, idents...)
  2243	
  2244		return spec
  2245	}
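
// Illustration (editorial note, not part of the original file): in a grouped
// constant declaration such as
//
//	const (
//		A = iota
//		B
//	)
//
// parseGenDecl invokes parseValueSpec with iota == 0 for the first spec and
// iota == 1 for the second; that index is passed to p.declare as the
// declaration data for the new constants (recorded on the resulting
// ast.Objects for later use).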
  2246	
  2247	func (p *parser) parseTypeSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
  2248		if p.trace {
  2249			defer un(trace(p, "TypeSpec"))
  2250		}
  2251	
  2252		ident := p.parseIdent()
  2253	
  2254		// Go spec: The scope of a type identifier declared inside a function begins
  2255		// at the identifier in the TypeSpec and ends at the end of the innermost
  2256		// containing block.
  2257		// (Global identifiers are resolved in a separate phase after parsing.)
  2258		spec := &ast.TypeSpec{Doc: doc, Name: ident}
  2259		p.declare(spec, nil, p.topScope, ast.Typ, ident)
  2260	
  2261		spec.Type = p.parseType()
  2262	p.expectSemi() // call before accessing p.lineComment
  2263		spec.Comment = p.lineComment
  2264	
  2265		return spec
  2266	}
  2267	
  2268	func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
  2269		if p.trace {
  2270			defer un(trace(p, "GenDecl("+keyword.String()+")"))
  2271		}
  2272	
  2273		doc := p.leadComment
  2274		pos := p.expect(keyword)
  2275		var lparen, rparen token.Pos
  2276		var list []ast.Spec
  2277		if p.tok == token.LPAREN {
  2278			lparen = p.pos
  2279			p.next()
  2280			for iota := 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++ {
  2281				list = append(list, f(p.leadComment, keyword, iota))
  2282			}
  2283			rparen = p.expect(token.RPAREN)
  2284			p.expectSemi()
  2285		} else {
  2286			list = append(list, f(nil, keyword, 0))
  2287		}
  2288	
  2289		return &ast.GenDecl{
  2290			Doc:    doc,
  2291			TokPos: pos,
  2292			Tok:    keyword,
  2293			Lparen: lparen,
  2294			Specs:  list,
  2295			Rparen: rparen,
  2296		}
  2297	}
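
// Illustration (editorial note, not part of the original file): a grouped
// declaration such as "var ( x int; y string )" produces a single
// *ast.GenDecl with Lparen/Rparen set and one ValueSpec per entry, while an
// ungrouped "var x int" produces a GenDecl whose Lparen and Rparen are
// token.NoPos and whose Specs slice has length one.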
  2298	
  2299	func (p *parser) parseReceiver(scope *ast.Scope) *ast.FieldList {
  2300		if p.trace {
  2301			defer un(trace(p, "Receiver"))
  2302		}
  2303	
  2304		par := p.parseParameters(scope, false)
  2305	
  2306		// must have exactly one receiver
  2307		if par.NumFields() != 1 {
  2308			p.errorExpected(par.Opening, "exactly one receiver")
  2309			par.List = []*ast.Field{{Type: &ast.BadExpr{From: par.Opening, To: par.Closing + 1}}}
  2310			return par
  2311		}
  2312	
  2313		// recv type must be of the form ["*"] identifier
  2314		recv := par.List[0]
  2315		base := deref(recv.Type)
  2316		if _, isIdent := base.(*ast.Ident); !isIdent {
  2317			if _, isBad := base.(*ast.BadExpr); !isBad {
  2318				// only report error if it's a new one
  2319				p.errorExpected(base.Pos(), "(unqualified) identifier")
  2320			}
  2321			par.List = []*ast.Field{
  2322				{Type: &ast.BadExpr{From: recv.Pos(), To: p.safePos(recv.End())}},
  2323			}
  2324		}
  2325	
  2326		return par
  2327	}
  2328	
  2329	func (p *parser) parseFuncDecl() *ast.FuncDecl {
  2330		if p.trace {
  2331			defer un(trace(p, "FunctionDecl"))
  2332		}
  2333	
  2334		doc := p.leadComment
  2335		pos := p.expect(token.FUNC)
  2336		scope := ast.NewScope(p.topScope) // function scope
  2337	
  2338		var recv *ast.FieldList
  2339		if p.tok == token.LPAREN {
  2340			recv = p.parseReceiver(scope)
  2341		}
  2342	
  2343		ident := p.parseIdent()
  2344	
  2345		params, results := p.parseSignature(scope)
  2346	
  2347		var body *ast.BlockStmt
  2348		if p.tok == token.LBRACE {
  2349			body = p.parseBody(scope)
  2350		}
  2351		p.expectSemi()
  2352	
  2353		decl := &ast.FuncDecl{
  2354			Doc:  doc,
  2355			Recv: recv,
  2356			Name: ident,
  2357			Type: &ast.FuncType{
  2358				Func:    pos,
  2359				Params:  params,
  2360				Results: results,
  2361			},
  2362			Body: body,
  2363		}
  2364		if recv == nil {
  2365			// Go spec: The scope of an identifier denoting a constant, type,
  2366			// variable, or function (but not method) declared at top level
  2367			// (outside any function) is the package block.
  2368			//
  2369			// init() functions cannot be referred to and there may
  2370			// be more than one - don't put them in the pkgScope
  2371			if ident.Name != "init" {
  2372				p.declare(decl, nil, p.pkgScope, ast.Fun, ident)
  2373			}
  2374		}
  2375	
  2376		return decl
  2377	}
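
// Illustration (editorial note, not part of the original file):
// "func (t *T) String() string { ... }" yields a FuncDecl with a one-field
// Recv list and is not entered into the package scope, whereas
// "func NewT() *T { ... }" is declared in pkgScope as an ast.Fun object.
// "func init() { ... }" is parsed like any other function but, as the
// comment above notes, deliberately stays out of pkgScope.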
  2378	
  2379	func (p *parser) parseDecl(sync func(*parser)) ast.Decl {
  2380		if p.trace {
  2381			defer un(trace(p, "Declaration"))
  2382		}
  2383	
  2384		var f parseSpecFunction
  2385		switch p.tok {
  2386		case token.CONST, token.VAR:
  2387			f = p.parseValueSpec
  2388	
  2389		case token.TYPE:
  2390			f = p.parseTypeSpec
  2391	
  2392		case token.FUNC:
  2393			return p.parseFuncDecl()
  2394	
  2395		default:
  2396			pos := p.pos
  2397			p.errorExpected(pos, "declaration")
  2398			sync(p)
  2399			return &ast.BadDecl{From: pos, To: p.pos}
  2400		}
  2401	
  2402		return p.parseGenDecl(p.tok, f)
  2403	}
  2404	
  2405	// ----------------------------------------------------------------------------
  2406	// Source files
  2407	
  2408	func (p *parser) parseFile() *ast.File {
  2409		if p.trace {
  2410			defer un(trace(p, "File"))
  2411		}
  2412	
  2413		// Don't bother parsing the rest if we had errors scanning the first token.
  2414		// Likely not a Go source file at all.
  2415		if p.errors.Len() != 0 {
  2416			return nil
  2417		}
  2418	
  2419		// package clause
  2420		doc := p.leadComment
  2421		pos := p.expect(token.PACKAGE)
  2422		// Go spec: The package clause is not a declaration;
  2423		// the package name does not appear in any scope.
  2424		ident := p.parseIdent()
  2425		if ident.Name == "_" && p.mode&DeclarationErrors != 0 {
  2426			p.error(p.pos, "invalid package name _")
  2427		}
  2428		p.expectSemi()
  2429	
  2430		// Don't bother parsing the rest if we had errors parsing the package clause.
  2431		// Likely not a Go source file at all.
  2432		if p.errors.Len() != 0 {
  2433			return nil
  2434		}
  2435	
  2436		p.openScope()
  2437		p.pkgScope = p.topScope
  2438		var decls []ast.Decl
  2439		if p.mode&PackageClauseOnly == 0 {
  2440			// import decls
  2441			for p.tok == token.IMPORT {
  2442				decls = append(decls, p.parseGenDecl(token.IMPORT, p.parseImportSpec))
  2443			}
  2444	
  2445			if p.mode&ImportsOnly == 0 {
  2446				// rest of package body
  2447				for p.tok != token.EOF {
  2448					decls = append(decls, p.parseDecl(syncDecl))
  2449				}
  2450			}
  2451		}
  2452		p.closeScope()
  2453		assert(p.topScope == nil, "unbalanced scopes")
  2454		assert(p.labelScope == nil, "unbalanced label scopes")
  2455	
  2456		// resolve global identifiers within the same file
  2457		i := 0
  2458		for _, ident := range p.unresolved {
  2459			// i <= index for current ident
  2460			assert(ident.Obj == unresolved, "object already resolved")
  2461			ident.Obj = p.pkgScope.Lookup(ident.Name) // also removes unresolved sentinel
  2462			if ident.Obj == nil {
  2463				p.unresolved[i] = ident
  2464				i++
  2465			}
  2466		}
  2467	
  2468		return &ast.File{
  2469			Doc:        doc,
  2470			Package:    pos,
  2471			Name:       ident,
  2472			Decls:      decls,
  2473			Scope:      p.pkgScope,
  2474			Imports:    p.imports,
  2475			Unresolved: p.unresolved[0:i],
  2476			Comments:   p.comments,
  2477		}
  2478	}
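
// Illustration (editorial note, not part of the original file): parseFile is
// driven through the package's exported entry points. A minimal sketch of
// typical client code (importing go/parser, go/token, fmt, and log):
//
//	fset := token.NewFileSet()
//	f, err := parser.ParseFile(fset, "hello.go", nil, parser.ParseComments)
//	if err != nil {
//		log.Fatal(err)
//	}
//	for _, imp := range f.Imports {
//		fmt.Println(imp.Path.Value)
//	}
//
// Identifiers that could not be resolved against this file's package scope
// (for example names declared in other files or packages) are left in
// f.Unresolved for a later resolution phase.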
  2479	
