...

Source file src/go/parser/parser.go

     1	// Copyright 2009 The Go Authors. All rights reserved.
     2	// Use of this source code is governed by a BSD-style
     3	// license that can be found in the LICENSE file.
     4	
     5	// Package parser implements a parser for Go source files. Input may be
     6	// provided in a variety of forms (see the various Parse* functions); the
     7	// output is an abstract syntax tree (AST) representing the Go source. The
     8	// parser is invoked through one of the Parse* functions.
     9	//
    10	package parser
    11	
    12	import (
    13		"fmt"
    14		"go/ast"
    15		"go/scanner"
    16		"go/token"
    17		"strconv"
    18		"strings"
    19		"unicode"
    20	)
    21	
    22	// The parser structure holds the parser's internal state.
    23	type parser struct {
    24		file    *token.File
    25		errors  scanner.ErrorList
    26		scanner scanner.Scanner
    27	
    28		// Tracing/debugging
    29		mode   Mode // parsing mode
    30		trace  bool // == (mode & Trace != 0)
    31		indent int  // indentation used for tracing output
    32	
    33		// Comments
    34		comments    []*ast.CommentGroup
    35		leadComment *ast.CommentGroup // last lead comment
    36		lineComment *ast.CommentGroup // last line comment
    37	
    38		// Next token
    39		pos token.Pos   // token position
    40		tok token.Token // one token look-ahead
    41		lit string      // token literal
    42	
    43		// Error recovery
    44		// (used to limit the number of calls to syncXXX functions
    45		// w/o making scanning progress - avoids potential endless
    46		// loops across multiple parser functions during error recovery)
    47		syncPos token.Pos // last synchronization position
    48		syncCnt int       // number of calls to syncXXX without progress
    49	
    50		// Non-syntactic parser control
    51		exprLev int  // < 0: in control clause, >= 0: in expression
    52		inRhs   bool // if set, the parser is parsing a rhs expression
    53	
    54		// Ordinary identifier scopes
    55		pkgScope   *ast.Scope        // pkgScope.Outer == nil
    56		topScope   *ast.Scope        // top-most scope; may be pkgScope
    57		unresolved []*ast.Ident      // unresolved identifiers
    58		imports    []*ast.ImportSpec // list of imports
    59	
    60		// Label scopes
    61		// (maintained by open/close LabelScope)
    62		labelScope  *ast.Scope     // label scope for current function
    63		targetStack [][]*ast.Ident // stack of unresolved labels
    64	}
    65	
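      	// init prepares the parser for parsing src: it adds the file named
      	// filename to fset, sets up the scanner (scanning comments only if
      	// ParseComments is set), records the mode, and reads the first token.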
    66	func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode Mode) {
    67		p.file = fset.AddFile(filename, -1, len(src))
    68		var m scanner.Mode
    69		if mode&ParseComments != 0 {
    70			m = scanner.ScanComments
    71		}
    72		eh := func(pos token.Position, msg string) { p.errors.Add(pos, msg) }
    73		p.scanner.Init(p.file, src, eh, m)
    74	
    75		p.mode = mode
    76		p.trace = mode&Trace != 0 // for convenience (p.trace is used frequently)
    77	
    78		p.next()
    79	}
    80	
    81	// ----------------------------------------------------------------------------
    82	// Scoping support
    83	
    84	func (p *parser) openScope() {
    85		p.topScope = ast.NewScope(p.topScope)
    86	}
    87	
    88	func (p *parser) closeScope() {
    89		p.topScope = p.topScope.Outer
    90	}
    91	
    92	func (p *parser) openLabelScope() {
    93		p.labelScope = ast.NewScope(p.labelScope)
    94		p.targetStack = append(p.targetStack, nil)
    95	}
    96	
    97	func (p *parser) closeLabelScope() {
    98		// resolve labels
    99		n := len(p.targetStack) - 1
   100		scope := p.labelScope
   101		for _, ident := range p.targetStack[n] {
   102			ident.Obj = scope.Lookup(ident.Name)
   103			if ident.Obj == nil && p.mode&DeclarationErrors != 0 {
   104				p.error(ident.Pos(), fmt.Sprintf("label %s undefined", ident.Name))
   105			}
   106		}
   107		// pop label scope
   108		p.targetStack = p.targetStack[0:n]
   109		p.labelScope = p.labelScope.Outer
   110	}
   111	
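      	// declare declares each of the given idents with the given kind in scope,
      	// recording decl and data in the identifier's object. Blank identifiers
      	// are not entered into the scope; redeclarations are reported if the
      	// DeclarationErrors mode is set.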
   112	func (p *parser) declare(decl, data interface{}, scope *ast.Scope, kind ast.ObjKind, idents ...*ast.Ident) {
   113		for _, ident := range idents {
   114			assert(ident.Obj == nil, "identifier already declared or resolved")
   115			obj := ast.NewObj(kind, ident.Name)
   116			// remember the corresponding declaration for redeclaration
   117			// errors and global variable resolution/typechecking phase
   118			obj.Decl = decl
   119			obj.Data = data
   120			ident.Obj = obj
   121			if ident.Name != "_" {
   122				if alt := scope.Insert(obj); alt != nil && p.mode&DeclarationErrors != 0 {
   123					prevDecl := ""
   124					if pos := alt.Pos(); pos.IsValid() {
   125						prevDecl = fmt.Sprintf("\n\tprevious declaration at %s", p.file.Position(pos))
   126					}
   127					p.error(ident.Pos(), fmt.Sprintf("%s redeclared in this block%s", ident.Name, prevDecl))
   128				}
   129			}
   130		}
   131	}
   132	
   133	func (p *parser) shortVarDecl(decl *ast.AssignStmt, list []ast.Expr) {
   134		// Go spec: A short variable declaration may redeclare variables
   135		// provided they were originally declared in the same block with
   136		// the same type, and at least one of the non-blank variables is new.
   137		n := 0 // number of new variables
   138		for _, x := range list {
   139			if ident, isIdent := x.(*ast.Ident); isIdent {
   140				assert(ident.Obj == nil, "identifier already declared or resolved")
   141				obj := ast.NewObj(ast.Var, ident.Name)
   142				// remember corresponding assignment for other tools
   143				obj.Decl = decl
   144				ident.Obj = obj
   145				if ident.Name != "_" {
   146					if alt := p.topScope.Insert(obj); alt != nil {
   147						ident.Obj = alt // redeclaration
   148					} else {
   149						n++ // new declaration
   150					}
   151				}
   152			} else {
   153				p.errorExpected(x.Pos(), "identifier on left side of :=")
   154			}
   155		}
   156		if n == 0 && p.mode&DeclarationErrors != 0 {
   157			p.error(list[0].Pos(), "no new variables on left side of :=")
   158		}
   159	}
   160	
   161	// The unresolved object is a sentinel to mark identifiers that have been added
   162	// to the list of unresolved identifiers. The sentinel is only used for verifying
   163	// internal consistency.
   164	var unresolved = new(ast.Object)
   165	
   166	// If x is an identifier, tryResolve attempts to resolve x by looking up
   167	// the object it denotes. If no object is found and collectUnresolved is
   168	// set, x is marked as unresolved and collected in the list of unresolved
   169	// identifiers.
   170	//
   171	func (p *parser) tryResolve(x ast.Expr, collectUnresolved bool) {
   172		// nothing to do if x is not an identifier or the blank identifier
   173		ident, _ := x.(*ast.Ident)
   174		if ident == nil {
   175			return
   176		}
   177		assert(ident.Obj == nil, "identifier already declared or resolved")
   178		if ident.Name == "_" {
   179			return
   180		}
   181		// try to resolve the identifier
   182		for s := p.topScope; s != nil; s = s.Outer {
   183			if obj := s.Lookup(ident.Name); obj != nil {
   184				ident.Obj = obj
   185				return
   186			}
   187		}
   188		// all local scopes are known, so any unresolved identifier
   189		// must be found either in the file scope, package scope
   190		// (perhaps in another file), or universe scope --- collect
   191		// them so that they can be resolved later
   192		if collectUnresolved {
   193			ident.Obj = unresolved
   194			p.unresolved = append(p.unresolved, ident)
   195		}
   196	}
   197	
   198	func (p *parser) resolve(x ast.Expr) {
   199		p.tryResolve(x, true)
   200	}
   201	
   202	// ----------------------------------------------------------------------------
   203	// Parsing support
   204	
   205	func (p *parser) printTrace(a ...interface{}) {
   206		const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
   207		const n = len(dots)
   208		pos := p.file.Position(p.pos)
   209		fmt.Printf("%5d:%3d: ", pos.Line, pos.Column)
   210		i := 2 * p.indent
   211		for i > n {
   212			fmt.Print(dots)
   213			i -= n
   214		}
   215		// i <= n
   216		fmt.Print(dots[0:i])
   217		fmt.Println(a...)
   218	}
   219	
   220	func trace(p *parser, msg string) *parser {
   221		p.printTrace(msg, "(")
   222		p.indent++
   223		return p
   224	}
   225	
   226	// Usage pattern: defer un(trace(p, "..."))
   227	func un(p *parser) {
   228		p.indent--
   229		p.printTrace(")")
   230	}
   231	
   232	// Advance to the next token.
   233	func (p *parser) next0() {
   234		// Because of one-token look-ahead, print the previous token
   235		// when tracing as it provides a more readable output. The
   236		// very first token (!p.pos.IsValid()) is not initialized
   237		// (it is token.ILLEGAL), so don't print it.
   238		if p.trace && p.pos.IsValid() {
   239			s := p.tok.String()
   240			switch {
   241			case p.tok.IsLiteral():
   242				p.printTrace(s, p.lit)
   243			case p.tok.IsOperator(), p.tok.IsKeyword():
   244				p.printTrace("\"" + s + "\"")
   245			default:
   246				p.printTrace(s)
   247			}
   248		}
   249	
   250		p.pos, p.tok, p.lit = p.scanner.Scan()
   251	}
   252	
   253	// Consume a comment and return it and the line on which it ends.
   254	func (p *parser) consumeComment() (comment *ast.Comment, endline int) {
   255		// /*-style comments may end on a different line than where they start.
   256		// Scan the comment for '\n' chars and adjust endline accordingly.
   257		endline = p.file.Line(p.pos)
   258		if p.lit[1] == '*' {
   259			// don't use range here - no need to decode Unicode code points
   260			for i := 0; i < len(p.lit); i++ {
   261				if p.lit[i] == '\n' {
   262					endline++
   263				}
   264			}
   265		}
   266	
   267		comment = &ast.Comment{Slash: p.pos, Text: p.lit}
   268		p.next0()
   269	
   270		return
   271	}
   272	
   273	// Consume a group of adjacent comments, add it to the parser's
   274	// comments list, and return it together with the line at which
   275	// the last comment in the group ends. A non-comment token or n
   276	// empty lines terminate a comment group.
   277	//
   278	func (p *parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline int) {
   279		var list []*ast.Comment
   280		endline = p.file.Line(p.pos)
   281		for p.tok == token.COMMENT && p.file.Line(p.pos) <= endline+n {
   282			var comment *ast.Comment
   283			comment, endline = p.consumeComment()
   284			list = append(list, comment)
   285		}
   286	
   287		// add comment group to the comments list
   288		comments = &ast.CommentGroup{List: list}
   289		p.comments = append(p.comments, comments)
   290	
   291		return
   292	}
   293	
   294	// Advance to the next non-comment token. In the process, collect
   295	// any comment groups encountered, and remember the last lead and
   296	// line comments.
   297	//
   298	// A lead comment is a comment group that starts and ends in a
   299	// line without any other tokens and that is followed by a non-comment
   300	// token on the line immediately after the comment group.
   301	//
   302	// A line comment is a comment group that follows a non-comment
   303	// token on the same line, and that has no tokens after it on the line
   304	// where it ends.
   305	//
   306	// Lead and line comments may be considered documentation that is
   307	// stored in the AST.
   308	//
   309	func (p *parser) next() {
   310		p.leadComment = nil
   311		p.lineComment = nil
   312		prev := p.pos
   313		p.next0()
   314	
   315		if p.tok == token.COMMENT {
   316			var comment *ast.CommentGroup
   317			var endline int
   318	
   319			if p.file.Line(p.pos) == p.file.Line(prev) {
   320				// The comment is on same line as the previous token; it
   321				// cannot be a lead comment but may be a line comment.
   322				comment, endline = p.consumeCommentGroup(0)
   323				if p.file.Line(p.pos) != endline {
   324					// The next token is on a different line, thus
   325					// the last comment group is a line comment.
   326					p.lineComment = comment
   327				}
   328			}
   329	
   330			// consume successor comments, if any
   331			endline = -1
   332			for p.tok == token.COMMENT {
   333				comment, endline = p.consumeCommentGroup(1)
   334			}
   335	
   336			if endline+1 == p.file.Line(p.pos) {
   337				// The next token is following on the line immediately after the
   338				// comment group, thus the last comment group is a lead comment.
   339				p.leadComment = comment
   340			}
   341		}
   342	}
   343	
   344	// A bailout panic is raised to indicate early termination.
   345	type bailout struct{}
   346	
   347	func (p *parser) error(pos token.Pos, msg string) {
   348		epos := p.file.Position(pos)
   349	
   350		// If AllErrors is not set, discard errors reported on the same line
   351		// as the last recorded error and stop parsing if there are more than
   352		// 10 errors.
   353		if p.mode&AllErrors == 0 {
   354			n := len(p.errors)
   355			if n > 0 && p.errors[n-1].Pos.Line == epos.Line {
   356				return // discard - likely a spurious error
   357			}
   358			if n > 10 {
   359				panic(bailout{})
   360			}
   361		}
   362	
   363		p.errors.Add(epos, msg)
   364	}
   365	
   366	func (p *parser) errorExpected(pos token.Pos, msg string) {
   367		msg = "expected " + msg
   368		if pos == p.pos {
   369			// the error happened at the current position;
   370			// make the error message more specific
   371			if p.tok == token.SEMICOLON && p.lit == "\n" {
   372				msg += ", found newline"
   373			} else {
   374				msg += ", found '" + p.tok.String() + "'"
   375				if p.tok.IsLiteral() {
   376					msg += " " + p.lit
   377				}
   378			}
   379		}
   380		p.error(pos, msg)
   381	}
   382	
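      	// expect reports an error if the current token is not tok. In either
      	// case it advances to the next token and returns the position of the
      	// token where tok was expected.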
   383	func (p *parser) expect(tok token.Token) token.Pos {
   384		pos := p.pos
   385		if p.tok != tok {
   386			p.errorExpected(pos, "'"+tok.String()+"'")
   387		}
   388		p.next() // make progress
   389		return pos
   390	}
   391	
   392	// expectClosing is like expect but provides a better error message
   393	// for the common case of a missing comma before a newline.
   394	//
   395	func (p *parser) expectClosing(tok token.Token, context string) token.Pos {
   396		if p.tok != tok && p.tok == token.SEMICOLON && p.lit == "\n" {
   397			p.error(p.pos, "missing ',' before newline in "+context)
   398			p.next()
   399		}
   400		return p.expect(tok)
   401	}
   402	
   403	func (p *parser) expectSemi() {
   404		// semicolon is optional before a closing ')' or '}'
   405		if p.tok != token.RPAREN && p.tok != token.RBRACE {
   406			if p.tok == token.SEMICOLON {
   407				p.next()
   408			} else {
   409				p.errorExpected(p.pos, "';'")
   410				syncStmt(p)
   411			}
   412		}
   413	}
   414	
   415	func (p *parser) atComma(context string) bool {
   416		if p.tok == token.COMMA {
   417			return true
   418		}
   419		if p.tok == token.SEMICOLON && p.lit == "\n" {
   420			p.error(p.pos, "missing ',' before newline in "+context)
   421			return true // "insert" the comma and continue
   422	
   423		}
   424		return false
   425	}
   426	
   427	func assert(cond bool, msg string) {
   428		if !cond {
   429			panic("go/parser internal error: " + msg)
   430		}
   431	}
   432	
   433	// syncStmt advances to the next statement.
   434	// Used for synchronization after an error.
   435	//
   436	func syncStmt(p *parser) {
   437		for {
   438			switch p.tok {
   439			case token.BREAK, token.CONST, token.CONTINUE, token.DEFER,
   440				token.FALLTHROUGH, token.FOR, token.GO, token.GOTO,
   441				token.IF, token.RETURN, token.SELECT, token.SWITCH,
   442				token.TYPE, token.VAR:
   443				// Return only if parser made some progress since last
   444				// sync or if it has not reached 10 sync calls without
   445				// progress. Otherwise consume at least one token to
   446				// avoid an endless parser loop (it is possible that
   447				// both parseOperand and parseStmt call syncStmt and
   448				// correctly do not advance, thus the need for the
   449				// invocation limit p.syncCnt).
   450				if p.pos == p.syncPos && p.syncCnt < 10 {
   451					p.syncCnt++
   452					return
   453				}
   454				if p.pos > p.syncPos {
   455					p.syncPos = p.pos
   456					p.syncCnt = 0
   457					return
   458				}
   459				// Reaching here indicates a parser bug, likely an
   460				// incorrect token list in this function, but it only
   461				// leads to skipping of possibly correct code if a
   462				// previous error is present, and thus is preferred
   463				// over a non-terminating parse.
   464			case token.EOF:
   465				return
   466			}
   467			p.next()
   468		}
   469	}
   470	
   471	// syncDecl advances to the next declaration.
   472	// Used for synchronization after an error.
   473	//
   474	func syncDecl(p *parser) {
   475		for {
   476			switch p.tok {
   477			case token.CONST, token.TYPE, token.VAR:
   478				// see comments in syncStmt
   479				if p.pos == p.syncPos && p.syncCnt < 10 {
   480					p.syncCnt++
   481					return
   482				}
   483				if p.pos > p.syncPos {
   484					p.syncPos = p.pos
   485					p.syncCnt = 0
   486					return
   487				}
   488			case token.EOF:
   489				return
   490			}
   491			p.next()
   492		}
   493	}
   494	
   495	// safePos returns a valid file position for a given position: If pos
   496	// is valid to begin with, safePos returns pos. If pos is out-of-range,
   497	// safePos returns the EOF position.
   498	//
   499	// This is a hack to work around "artificial" end positions in the AST which
   500	// are computed by adding 1 to (presumably valid) token positions. If the
   501	// token positions are invalid due to parse errors, the resulting end position
   502	// may be past the file's EOF position, which would lead to panics if used
   503	// later on.
   504	//
   505	func (p *parser) safePos(pos token.Pos) (res token.Pos) {
   506		defer func() {
   507			if recover() != nil {
   508				res = token.Pos(p.file.Base() + p.file.Size()) // EOF position
   509			}
   510		}()
   511		_ = p.file.Offset(pos) // trigger a panic if position is out-of-range
   512		return pos
   513	}
   514	
   515	// ----------------------------------------------------------------------------
   516	// Identifiers
   517	
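      	// parseIdent parses and returns an identifier. If the current token is
      	// not an IDENT, an error is reported and a blank identifier is returned
      	// in its place.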
   518	func (p *parser) parseIdent() *ast.Ident {
   519		pos := p.pos
   520		name := "_"
   521		if p.tok == token.IDENT {
   522			name = p.lit
   523			p.next()
   524		} else {
   525			p.expect(token.IDENT) // use expect() error handling
   526		}
   527		return &ast.Ident{NamePos: pos, Name: name}
   528	}
   529	
   530	func (p *parser) parseIdentList() (list []*ast.Ident) {
   531		if p.trace {
   532			defer un(trace(p, "IdentList"))
   533		}
   534	
   535		list = append(list, p.parseIdent())
   536		for p.tok == token.COMMA {
   537			p.next()
   538			list = append(list, p.parseIdent())
   539		}
   540	
   541		return
   542	}
   543	
   544	// ----------------------------------------------------------------------------
   545	// Common productions
   546	
   547	// If lhs is set, result list elements which are identifiers are not resolved.
   548	func (p *parser) parseExprList(lhs bool) (list []ast.Expr) {
   549		if p.trace {
   550			defer un(trace(p, "ExpressionList"))
   551		}
   552	
   553		list = append(list, p.checkExpr(p.parseExpr(lhs)))
   554		for p.tok == token.COMMA {
   555			p.next()
   556			list = append(list, p.checkExpr(p.parseExpr(lhs)))
   557		}
   558	
   559		return
   560	}
   561	
   562	func (p *parser) parseLhsList() []ast.Expr {
   563		old := p.inRhs
   564		p.inRhs = false
   565		list := p.parseExprList(true)
   566		switch p.tok {
   567		case token.DEFINE:
   568			// lhs of a short variable declaration
   569			// but doesn't enter scope until later:
   570			// caller must call p.shortVarDecl(decl, list)
   571			// at the appropriate time.
   572		case token.COLON:
   573			// lhs of a label declaration or a communication clause of a select
   574			// statement (parseLhsList is not called when parsing the case clause
   575			// of a switch statement):
   576			// - labels are declared by the caller of parseLhsList
   577			// - for communication clauses, if there is a stand-alone identifier
   578			//   followed by a colon, we have a syntax error; there is no need
   579			//   to resolve the identifier in that case
   580		default:
   581			// identifiers must be declared elsewhere
   582			for _, x := range list {
   583				p.resolve(x)
   584			}
   585		}
   586		p.inRhs = old
   587		return list
   588	}
   589	
   590	func (p *parser) parseRhsList() []ast.Expr {
   591		old := p.inRhs
   592		p.inRhs = true
   593		list := p.parseExprList(false)
   594		p.inRhs = old
   595		return list
   596	}
   597	
   598	// ----------------------------------------------------------------------------
   599	// Types
   600	
   601	func (p *parser) parseType() ast.Expr {
   602		if p.trace {
   603			defer un(trace(p, "Type"))
   604		}
   605	
   606		typ := p.tryType()
   607	
   608		if typ == nil {
   609			pos := p.pos
   610			p.errorExpected(pos, "type")
   611			p.next() // make progress
   612			return &ast.BadExpr{From: pos, To: p.pos}
   613		}
   614	
   615		return typ
   616	}
   617	
   618	// If the result is an identifier, it is not resolved.
   619	func (p *parser) parseTypeName() ast.Expr {
   620		if p.trace {
   621			defer un(trace(p, "TypeName"))
   622		}
   623	
   624		ident := p.parseIdent()
   625		// don't resolve ident yet - it may be a parameter or field name
   626	
   627		if p.tok == token.PERIOD {
   628			// ident is a package name
   629			p.next()
   630			p.resolve(ident)
   631			sel := p.parseIdent()
   632			return &ast.SelectorExpr{X: ident, Sel: sel}
   633		}
   634	
   635		return ident
   636	}
   637	
   638	func (p *parser) parseArrayType() ast.Expr {
   639		if p.trace {
   640			defer un(trace(p, "ArrayType"))
   641		}
   642	
   643		lbrack := p.expect(token.LBRACK)
   644		p.exprLev++
   645		var len ast.Expr
   646		// always permit ellipsis for more fault-tolerant parsing
   647		if p.tok == token.ELLIPSIS {
   648			len = &ast.Ellipsis{Ellipsis: p.pos}
   649			p.next()
   650		} else if p.tok != token.RBRACK {
   651			len = p.parseRhs()
   652		}
   653		p.exprLev--
   654		p.expect(token.RBRACK)
   655		elt := p.parseType()
   656	
   657		return &ast.ArrayType{Lbrack: lbrack, Len: len, Elt: elt}
   658	}
   659	
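      	// makeIdentList converts a list of expressions into the corresponding
      	// list of identifiers. Non-identifier expressions are reported as errors
      	// (unless they are already bad) and replaced by blank identifiers.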
   660	func (p *parser) makeIdentList(list []ast.Expr) []*ast.Ident {
   661		idents := make([]*ast.Ident, len(list))
   662		for i, x := range list {
   663			ident, isIdent := x.(*ast.Ident)
   664			if !isIdent {
   665				if _, isBad := x.(*ast.BadExpr); !isBad {
   666					// only report error if it's a new one
   667					p.errorExpected(x.Pos(), "identifier")
   668				}
   669				ident = &ast.Ident{NamePos: x.Pos(), Name: "_"}
   670			}
   671			idents[i] = ident
   672		}
   673		return idents
   674	}
   675	
   676	func (p *parser) parseFieldDecl(scope *ast.Scope) *ast.Field {
   677		if p.trace {
   678			defer un(trace(p, "FieldDecl"))
   679		}
   680	
   681		doc := p.leadComment
   682	
   683		// FieldDecl
   684		list, typ := p.parseVarList(false)
   685	
   686		// Tag
   687		var tag *ast.BasicLit
   688		if p.tok == token.STRING {
   689			tag = &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
   690			p.next()
   691		}
   692	
   693		// analyze case
   694		var idents []*ast.Ident
   695		if typ != nil {
   696			// IdentifierList Type
   697			idents = p.makeIdentList(list)
   698		} else {
   699			// ["*"] TypeName (AnonymousField)
   700			typ = list[0] // we always have at least one element
   701			if n := len(list); n > 1 || !isTypeName(deref(typ)) {
   702				pos := typ.Pos()
   703				p.errorExpected(pos, "anonymous field")
   704				typ = &ast.BadExpr{From: pos, To: p.safePos(list[n-1].End())}
   705			}
   706		}
   707	
   708		p.expectSemi() // call before accessing p.lineComment
   709	
   710		field := &ast.Field{Doc: doc, Names: idents, Type: typ, Tag: tag, Comment: p.lineComment}
   711		p.declare(field, nil, scope, ast.Var, idents...)
   712		p.resolve(typ)
   713	
   714		return field
   715	}
   716	
   717	func (p *parser) parseStructType() *ast.StructType {
   718		if p.trace {
   719			defer un(trace(p, "StructType"))
   720		}
   721	
   722		pos := p.expect(token.STRUCT)
   723		lbrace := p.expect(token.LBRACE)
   724		scope := ast.NewScope(nil) // struct scope
   725		var list []*ast.Field
   726		for p.tok == token.IDENT || p.tok == token.MUL || p.tok == token.LPAREN {
   727			// a field declaration cannot start with a '(' but we accept
   728			// it here for more robust parsing and better error messages
   729			// (parseFieldDecl will check and complain if necessary)
   730			list = append(list, p.parseFieldDecl(scope))
   731		}
   732		rbrace := p.expect(token.RBRACE)
   733	
   734		return &ast.StructType{
   735			Struct: pos,
   736			Fields: &ast.FieldList{
   737				Opening: lbrace,
   738				List:    list,
   739				Closing: rbrace,
   740			},
   741		}
   742	}
   743	
   744	func (p *parser) parsePointerType() *ast.StarExpr {
   745		if p.trace {
   746			defer un(trace(p, "PointerType"))
   747		}
   748	
   749		star := p.expect(token.MUL)
   750		base := p.parseType()
   751	
   752		return &ast.StarExpr{Star: star, X: base}
   753	}
   754	
   755	// If the result is an identifier, it is not resolved.
   756	func (p *parser) tryVarType(isParam bool) ast.Expr {
   757		if isParam && p.tok == token.ELLIPSIS {
   758			pos := p.pos
   759			p.next()
   760			typ := p.tryIdentOrType() // don't use parseType so we can provide better error message
   761			if typ != nil {
   762				p.resolve(typ)
   763			} else {
   764				p.error(pos, "'...' parameter is missing type")
   765				typ = &ast.BadExpr{From: pos, To: p.pos}
   766			}
   767			return &ast.Ellipsis{Ellipsis: pos, Elt: typ}
   768		}
   769		return p.tryIdentOrType()
   770	}
   771	
   772	// If the result is an identifier, it is not resolved.
   773	func (p *parser) parseVarType(isParam bool) ast.Expr {
   774		typ := p.tryVarType(isParam)
   775		if typ == nil {
   776			pos := p.pos
   777			p.errorExpected(pos, "type")
   778			p.next() // make progress
   779			typ = &ast.BadExpr{From: pos, To: p.pos}
   780		}
   781		return typ
   782	}
   783	
   784	// If any of the results are identifiers, they are not resolved.
   785	func (p *parser) parseVarList(isParam bool) (list []ast.Expr, typ ast.Expr) {
   786		if p.trace {
   787			defer un(trace(p, "VarList"))
   788		}
   789	
   790		// a list of identifiers looks like a list of type names
   791		//
   792		// parse/tryVarType accepts any type (including parenthesized
   793		// ones) even though the syntax does not permit them here: we
   794		// accept them all for more robust parsing and complain later
   795		for typ := p.parseVarType(isParam); typ != nil; {
   796			list = append(list, typ)
   797			if p.tok != token.COMMA {
   798				break
   799			}
   800			p.next()
   801			typ = p.tryVarType(isParam) // maybe nil as in: func f(int,) {}
   802		}
   803	
   804		// if we had a list of identifiers, it must be followed by a type
   805		typ = p.tryVarType(isParam)
   806	
   807		return
   808	}
   809	
   810	func (p *parser) parseParameterList(scope *ast.Scope, ellipsisOk bool) (params []*ast.Field) {
   811		if p.trace {
   812			defer un(trace(p, "ParameterList"))
   813		}
   814	
   815		// ParameterDecl
   816		list, typ := p.parseVarList(ellipsisOk)
   817	
   818		// analyze case
   819		if typ != nil {
   820			// IdentifierList Type
   821			idents := p.makeIdentList(list)
   822			field := &ast.Field{Names: idents, Type: typ}
   823			params = append(params, field)
   824			// Go spec: The scope of an identifier denoting a function
   825			// parameter or result variable is the function body.
   826			p.declare(field, nil, scope, ast.Var, idents...)
   827			p.resolve(typ)
   828			if !p.atComma("parameter list") {
   829				return
   830			}
   831			p.next()
   832			for p.tok != token.RPAREN && p.tok != token.EOF {
   833				idents := p.parseIdentList()
   834				typ := p.parseVarType(ellipsisOk)
   835				field := &ast.Field{Names: idents, Type: typ}
   836				params = append(params, field)
   837				// Go spec: The scope of an identifier denoting a function
   838				// parameter or result variable is the function body.
   839				p.declare(field, nil, scope, ast.Var, idents...)
   840				p.resolve(typ)
   841				if !p.atComma("parameter list") {
   842					break
   843				}
   844				p.next()
   845			}
   846			return
   847		}
   848	
   849		// Type { "," Type } (anonymous parameters)
   850		params = make([]*ast.Field, len(list))
   851		for i, typ := range list {
   852			p.resolve(typ)
   853			params[i] = &ast.Field{Type: typ}
   854		}
   855		return
   856	}
   857	
   858	func (p *parser) parseParameters(scope *ast.Scope, ellipsisOk bool) *ast.FieldList {
   859		if p.trace {
   860			defer un(trace(p, "Parameters"))
   861		}
   862	
   863		var params []*ast.Field
   864		lparen := p.expect(token.LPAREN)
   865		if p.tok != token.RPAREN {
   866			params = p.parseParameterList(scope, ellipsisOk)
   867		}
   868		rparen := p.expect(token.RPAREN)
   869	
   870		return &ast.FieldList{Opening: lparen, List: params, Closing: rparen}
   871	}
   872	
   873	func (p *parser) parseResult(scope *ast.Scope) *ast.FieldList {
   874		if p.trace {
   875			defer un(trace(p, "Result"))
   876		}
   877	
   878		if p.tok == token.LPAREN {
   879			return p.parseParameters(scope, false)
   880		}
   881	
   882		typ := p.tryType()
   883		if typ != nil {
   884			list := make([]*ast.Field, 1)
   885			list[0] = &ast.Field{Type: typ}
   886			return &ast.FieldList{List: list}
   887		}
   888	
   889		return nil
   890	}
   891	
   892	func (p *parser) parseSignature(scope *ast.Scope) (params, results *ast.FieldList) {
   893		if p.trace {
   894			defer un(trace(p, "Signature"))
   895		}
   896	
   897		params = p.parseParameters(scope, true)
   898		results = p.parseResult(scope)
   899	
   900		return
   901	}
   902	
   903	func (p *parser) parseFuncType() (*ast.FuncType, *ast.Scope) {
   904		if p.trace {
   905			defer un(trace(p, "FuncType"))
   906		}
   907	
   908		pos := p.expect(token.FUNC)
   909		scope := ast.NewScope(p.topScope) // function scope
   910		params, results := p.parseSignature(scope)
   911	
   912		return &ast.FuncType{Func: pos, Params: params, Results: results}, scope
   913	}
   914	
   915	func (p *parser) parseMethodSpec(scope *ast.Scope) *ast.Field {
   916		if p.trace {
   917			defer un(trace(p, "MethodSpec"))
   918		}
   919	
   920		doc := p.leadComment
   921		var idents []*ast.Ident
   922		var typ ast.Expr
   923		x := p.parseTypeName()
   924		if ident, isIdent := x.(*ast.Ident); isIdent && p.tok == token.LPAREN {
   925			// method
   926			idents = []*ast.Ident{ident}
   927			scope := ast.NewScope(nil) // method scope
   928			params, results := p.parseSignature(scope)
   929			typ = &ast.FuncType{Func: token.NoPos, Params: params, Results: results}
   930		} else {
   931			// embedded interface
   932			typ = x
   933			p.resolve(typ)
   934		}
   935		p.expectSemi() // call before accessing p.lineComment
   936	
   937		spec := &ast.Field{Doc: doc, Names: idents, Type: typ, Comment: p.lineComment}
   938		p.declare(spec, nil, scope, ast.Fun, idents...)
   939	
   940		return spec
   941	}
   942	
   943	func (p *parser) parseInterfaceType() *ast.InterfaceType {
   944		if p.trace {
   945			defer un(trace(p, "InterfaceType"))
   946		}
   947	
   948		pos := p.expect(token.INTERFACE)
   949		lbrace := p.expect(token.LBRACE)
   950		scope := ast.NewScope(nil) // interface scope
   951		var list []*ast.Field
   952		for p.tok == token.IDENT {
   953			list = append(list, p.parseMethodSpec(scope))
   954		}
   955		rbrace := p.expect(token.RBRACE)
   956	
   957		return &ast.InterfaceType{
   958			Interface: pos,
   959			Methods: &ast.FieldList{
   960				Opening: lbrace,
   961				List:    list,
   962				Closing: rbrace,
   963			},
   964		}
   965	}
   966	
   967	func (p *parser) parseMapType() *ast.MapType {
   968		if p.trace {
   969			defer un(trace(p, "MapType"))
   970		}
   971	
   972		pos := p.expect(token.MAP)
   973		p.expect(token.LBRACK)
   974		key := p.parseType()
   975		p.expect(token.RBRACK)
   976		value := p.parseType()
   977	
   978		return &ast.MapType{Map: pos, Key: key, Value: value}
   979	}
   980	
   981	func (p *parser) parseChanType() *ast.ChanType {
   982		if p.trace {
   983			defer un(trace(p, "ChanType"))
   984		}
   985	
   986		pos := p.pos
   987		dir := ast.SEND | ast.RECV
   988		var arrow token.Pos
   989		if p.tok == token.CHAN {
   990			p.next()
   991			if p.tok == token.ARROW {
   992				arrow = p.pos
   993				p.next()
   994				dir = ast.SEND
   995			}
   996		} else {
   997			arrow = p.expect(token.ARROW)
   998			p.expect(token.CHAN)
   999			dir = ast.RECV
  1000		}
  1001		value := p.parseType()
  1002	
  1003		return &ast.ChanType{Begin: pos, Arrow: arrow, Dir: dir, Value: value}
  1004	}
  1005	
  1006	// If the result is an identifier, it is not resolved.
  1007	func (p *parser) tryIdentOrType() ast.Expr {
  1008		switch p.tok {
  1009		case token.IDENT:
  1010			return p.parseTypeName()
  1011		case token.LBRACK:
  1012			return p.parseArrayType()
  1013		case token.STRUCT:
  1014			return p.parseStructType()
  1015		case token.MUL:
  1016			return p.parsePointerType()
  1017		case token.FUNC:
  1018			typ, _ := p.parseFuncType()
  1019			return typ
  1020		case token.INTERFACE:
  1021			return p.parseInterfaceType()
  1022		case token.MAP:
  1023			return p.parseMapType()
  1024		case token.CHAN, token.ARROW:
  1025			return p.parseChanType()
  1026		case token.LPAREN:
  1027			lparen := p.pos
  1028			p.next()
  1029			typ := p.parseType()
  1030			rparen := p.expect(token.RPAREN)
  1031			return &ast.ParenExpr{Lparen: lparen, X: typ, Rparen: rparen}
  1032		}
  1033	
  1034		// no type found
  1035		return nil
  1036	}
  1037	
  1038	func (p *parser) tryType() ast.Expr {
  1039		typ := p.tryIdentOrType()
  1040		if typ != nil {
  1041			p.resolve(typ)
  1042		}
  1043		return typ
  1044	}
  1045	
  1046	// ----------------------------------------------------------------------------
  1047	// Blocks
  1048	
  1049	func (p *parser) parseStmtList() (list []ast.Stmt) {
  1050		if p.trace {
  1051			defer un(trace(p, "StatementList"))
  1052		}
  1053	
  1054		for p.tok != token.CASE && p.tok != token.DEFAULT && p.tok != token.RBRACE && p.tok != token.EOF {
  1055			list = append(list, p.parseStmt())
  1056		}
  1057	
  1058		return
  1059	}
  1060	
  1061	func (p *parser) parseBody(scope *ast.Scope) *ast.BlockStmt {
  1062		if p.trace {
  1063			defer un(trace(p, "Body"))
  1064		}
  1065	
  1066		lbrace := p.expect(token.LBRACE)
  1067		p.topScope = scope // open function scope
  1068		p.openLabelScope()
  1069		list := p.parseStmtList()
  1070		p.closeLabelScope()
  1071		p.closeScope()
  1072		rbrace := p.expect(token.RBRACE)
  1073	
  1074		return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
  1075	}
  1076	
  1077	func (p *parser) parseBlockStmt() *ast.BlockStmt {
  1078		if p.trace {
  1079			defer un(trace(p, "BlockStmt"))
  1080		}
  1081	
  1082		lbrace := p.expect(token.LBRACE)
  1083		p.openScope()
  1084		list := p.parseStmtList()
  1085		p.closeScope()
  1086		rbrace := p.expect(token.RBRACE)
  1087	
  1088		return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
  1089	}
  1090	
  1091	// ----------------------------------------------------------------------------
  1092	// Expressions
  1093	
  1094	func (p *parser) parseFuncTypeOrLit() ast.Expr {
  1095		if p.trace {
  1096			defer un(trace(p, "FuncTypeOrLit"))
  1097		}
  1098	
  1099		typ, scope := p.parseFuncType()
  1100		if p.tok != token.LBRACE {
  1101			// function type only
  1102			return typ
  1103		}
  1104	
  1105		p.exprLev++
  1106		body := p.parseBody(scope)
  1107		p.exprLev--
  1108	
  1109		return &ast.FuncLit{Type: typ, Body: body}
  1110	}
  1111	
  1112	// parseOperand may return an expression or a raw type (incl. array
  1113	// types of the form [...]T). Callers must verify the result.
  1114	// If lhs is set and the result is an identifier, it is not resolved.
  1115	//
  1116	func (p *parser) parseOperand(lhs bool) ast.Expr {
  1117		if p.trace {
  1118			defer un(trace(p, "Operand"))
  1119		}
  1120	
  1121		switch p.tok {
  1122		case token.IDENT:
  1123			x := p.parseIdent()
  1124			if !lhs {
  1125				p.resolve(x)
  1126			}
  1127			return x
  1128	
  1129		case token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING:
  1130			x := &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
  1131			p.next()
  1132			return x
  1133	
  1134		case token.LPAREN:
  1135			lparen := p.pos
  1136			p.next()
  1137			p.exprLev++
  1138			x := p.parseRhsOrType() // types may be parenthesized: (some type)
  1139			p.exprLev--
  1140			rparen := p.expect(token.RPAREN)
  1141			return &ast.ParenExpr{Lparen: lparen, X: x, Rparen: rparen}
  1142	
  1143		case token.FUNC:
  1144			return p.parseFuncTypeOrLit()
  1145		}
  1146	
  1147		if typ := p.tryIdentOrType(); typ != nil {
  1148			// could be type for composite literal or conversion
  1149			_, isIdent := typ.(*ast.Ident)
  1150			assert(!isIdent, "type cannot be identifier")
  1151			return typ
  1152		}
  1153	
  1154		// we have an error
  1155		pos := p.pos
  1156		p.errorExpected(pos, "operand")
  1157		syncStmt(p)
  1158		return &ast.BadExpr{From: pos, To: p.pos}
  1159	}
  1160	
  1161	func (p *parser) parseSelector(x ast.Expr) ast.Expr {
  1162		if p.trace {
  1163			defer un(trace(p, "Selector"))
  1164		}
  1165	
  1166		sel := p.parseIdent()
  1167	
  1168		return &ast.SelectorExpr{X: x, Sel: sel}
  1169	}
  1170	
  1171	func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr {
  1172		if p.trace {
  1173			defer un(trace(p, "TypeAssertion"))
  1174		}
  1175	
  1176		lparen := p.expect(token.LPAREN)
  1177		var typ ast.Expr
  1178		if p.tok == token.TYPE {
  1179			// type switch: typ == nil
  1180			p.next()
  1181		} else {
  1182			typ = p.parseType()
  1183		}
  1184		rparen := p.expect(token.RPAREN)
  1185	
  1186		return &ast.TypeAssertExpr{X: x, Type: typ, Lparen: lparen, Rparen: rparen}
  1187	}
  1188	
  1189	func (p *parser) parseIndexOrSlice(x ast.Expr) ast.Expr {
  1190		if p.trace {
  1191			defer un(trace(p, "IndexOrSlice"))
  1192		}
  1193	
  1194		const N = 3 // change the 3 to 2 to disable 3-index slices
  1195		lbrack := p.expect(token.LBRACK)
  1196		p.exprLev++
  1197		var index [N]ast.Expr
  1198		var colons [N - 1]token.Pos
  1199		if p.tok != token.COLON {
  1200			index[0] = p.parseRhs()
  1201		}
  1202		ncolons := 0
  1203		for p.tok == token.COLON && ncolons < len(colons) {
  1204			colons[ncolons] = p.pos
  1205			ncolons++
  1206			p.next()
  1207			if p.tok != token.COLON && p.tok != token.RBRACK && p.tok != token.EOF {
  1208				index[ncolons] = p.parseRhs()
  1209			}
  1210		}
  1211		p.exprLev--
  1212		rbrack := p.expect(token.RBRACK)
  1213	
  1214		if ncolons > 0 {
  1215			// slice expression
  1216			slice3 := false
  1217			if ncolons == 2 {
  1218				slice3 = true
  1219				// Check presence of 2nd and 3rd index here rather than during type-checking
  1220				// to prevent erroneous programs from passing through gofmt (was issue 7305).
  1221				if index[1] == nil {
  1222					p.error(colons[0], "2nd index required in 3-index slice")
  1223					index[1] = &ast.BadExpr{From: colons[0] + 1, To: colons[1]}
  1224				}
  1225				if index[2] == nil {
  1226					p.error(colons[1], "3rd index required in 3-index slice")
  1227					index[2] = &ast.BadExpr{From: colons[1] + 1, To: rbrack}
  1228				}
  1229			}
  1230			return &ast.SliceExpr{X: x, Lbrack: lbrack, Low: index[0], High: index[1], Max: index[2], Slice3: slice3, Rbrack: rbrack}
  1231		}
  1232	
  1233		return &ast.IndexExpr{X: x, Lbrack: lbrack, Index: index[0], Rbrack: rbrack}
  1234	}
  1235	
  1236	func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr {
  1237		if p.trace {
  1238			defer un(trace(p, "CallOrConversion"))
  1239		}
  1240	
  1241		lparen := p.expect(token.LPAREN)
  1242		p.exprLev++
  1243		var list []ast.Expr
  1244		var ellipsis token.Pos
  1245		for p.tok != token.RPAREN && p.tok != token.EOF && !ellipsis.IsValid() {
  1246			list = append(list, p.parseRhsOrType()) // builtins may expect a type: make(some type, ...)
  1247			if p.tok == token.ELLIPSIS {
  1248				ellipsis = p.pos
  1249				p.next()
  1250			}
  1251			if !p.atComma("argument list") {
  1252				break
  1253			}
  1254			p.next()
  1255		}
  1256		p.exprLev--
  1257		rparen := p.expectClosing(token.RPAREN, "argument list")
  1258	
  1259		return &ast.CallExpr{Fun: fun, Lparen: lparen, Args: list, Ellipsis: ellipsis, Rparen: rparen}
  1260	}
  1261	
  1262	func (p *parser) parseElement(keyOk bool) ast.Expr {
  1263		if p.trace {
  1264			defer un(trace(p, "Element"))
  1265		}
  1266	
  1267		if p.tok == token.LBRACE {
  1268			return p.parseLiteralValue(nil)
  1269		}
  1270	
  1271		// Because the parser doesn't know the composite literal type, it cannot
  1272		// know if a key that's an identifier is a struct field name or a name
  1273		// denoting a value. The former is not resolved by the parser or the
  1274		// resolver.
  1275		//
  1276		// Instead, _try_ to resolve such a key if possible. If it resolves,
  1277		// it a) has correctly resolved, or b) incorrectly resolved because
  1278		// the key is a struct field with a name matching another identifier.
  1279		// In the former case we are done, and in the latter case we don't
  1280		// care because the type checker will do a separate field lookup.
  1281		//
  1282		// If the key does not resolve, it a) must be defined at the top
  1283		// level in another file of the same package, the universe scope, or be
  1284		// undeclared; or b) it is a struct field. In the former case, the type
  1285		// checker can do a top-level lookup, and in the latter case it will do
  1286		// a separate field lookup.
  1287		x := p.checkExpr(p.parseExpr(keyOk))
  1288		if keyOk {
  1289			if p.tok == token.COLON {
  1290				colon := p.pos
  1291				p.next()
  1292				// Try to resolve the key but don't collect it
  1293				// as unresolved identifier if it fails so that
  1294				// we don't get (possibly false) errors about
  1295				// undeclared names.
  1296				p.tryResolve(x, false)
  1297				return &ast.KeyValueExpr{Key: x, Colon: colon, Value: p.parseElement(false)}
  1298			}
  1299			p.resolve(x) // not a key
  1300		}
  1301	
  1302		return x
  1303	}
  1304	
  1305	func (p *parser) parseElementList() (list []ast.Expr) {
  1306		if p.trace {
  1307			defer un(trace(p, "ElementList"))
  1308		}
  1309	
  1310		for p.tok != token.RBRACE && p.tok != token.EOF {
  1311			list = append(list, p.parseElement(true))
  1312			if !p.atComma("composite literal") {
  1313				break
  1314			}
  1315			p.next()
  1316		}
  1317	
  1318		return
  1319	}
  1320	
  1321	func (p *parser) parseLiteralValue(typ ast.Expr) ast.Expr {
  1322		if p.trace {
  1323			defer un(trace(p, "LiteralValue"))
  1324		}
  1325	
  1326		lbrace := p.expect(token.LBRACE)
  1327		var elts []ast.Expr
  1328		p.exprLev++
  1329		if p.tok != token.RBRACE {
  1330			elts = p.parseElementList()
  1331		}
  1332		p.exprLev--
  1333		rbrace := p.expectClosing(token.RBRACE, "composite literal")
  1334		return &ast.CompositeLit{Type: typ, Lbrace: lbrace, Elts: elts, Rbrace: rbrace}
  1335	}
  1336	
  1337	// checkExpr checks that x is an expression (and not a type).
  1338	func (p *parser) checkExpr(x ast.Expr) ast.Expr {
  1339		switch unparen(x).(type) {
  1340		case *ast.BadExpr:
  1341		case *ast.Ident:
  1342		case *ast.BasicLit:
  1343		case *ast.FuncLit:
  1344		case *ast.CompositeLit:
  1345		case *ast.ParenExpr:
  1346			panic("unreachable")
  1347		case *ast.SelectorExpr:
  1348		case *ast.IndexExpr:
  1349		case *ast.SliceExpr:
  1350		case *ast.TypeAssertExpr:
  1351			// If t.Type == nil we have a type assertion of the form
  1352			// y.(type), which is only allowed in type switch expressions.
  1353			// It's hard to exclude those but for the case where we are in
  1354			// a type switch. Instead be lenient and test this in the type
  1355			// checker.
  1356		case *ast.CallExpr:
  1357		case *ast.StarExpr:
  1358		case *ast.UnaryExpr:
  1359		case *ast.BinaryExpr:
  1360		default:
  1361			// all other nodes are not proper expressions
  1362			p.errorExpected(x.Pos(), "expression")
  1363			x = &ast.BadExpr{From: x.Pos(), To: p.safePos(x.End())}
  1364		}
  1365		return x
  1366	}
  1367	
  1368	// isTypeName returns true iff x is a (qualified) TypeName.
  1369	func isTypeName(x ast.Expr) bool {
  1370		switch t := x.(type) {
  1371		case *ast.BadExpr:
  1372		case *ast.Ident:
  1373		case *ast.SelectorExpr:
  1374			_, isIdent := t.X.(*ast.Ident)
  1375			return isIdent
  1376		default:
  1377			return false // all other nodes are not type names
  1378		}
  1379		return true
  1380	}
  1381	
  1382	// isLiteralType returns true iff x is a legal composite literal type.
  1383	func isLiteralType(x ast.Expr) bool {
  1384		switch t := x.(type) {
  1385		case *ast.BadExpr:
  1386		case *ast.Ident:
  1387		case *ast.SelectorExpr:
  1388			_, isIdent := t.X.(*ast.Ident)
  1389			return isIdent
  1390		case *ast.ArrayType:
  1391		case *ast.StructType:
  1392		case *ast.MapType:
  1393		default:
  1394			return false // all other nodes are not legal composite literal types
  1395		}
  1396		return true
  1397	}
  1398	
  1399	// If x is of the form *T, deref returns T, otherwise it returns x.
  1400	func deref(x ast.Expr) ast.Expr {
  1401		if p, isPtr := x.(*ast.StarExpr); isPtr {
  1402			x = p.X
  1403		}
  1404		return x
  1405	}
  1406	
  1407	// If x is of the form (T), unparen returns unparen(T), otherwise it returns x.
  1408	func unparen(x ast.Expr) ast.Expr {
  1409		if p, isParen := x.(*ast.ParenExpr); isParen {
  1410			x = unparen(p.X)
  1411		}
  1412		return x
  1413	}
  1414	
  1415	// checkExprOrType checks that x is an expression or a type
  1416	// (and not a raw type such as [...]T).
  1417	//
  1418	func (p *parser) checkExprOrType(x ast.Expr) ast.Expr {
  1419		switch t := unparen(x).(type) {
  1420		case *ast.ParenExpr:
  1421			panic("unreachable")
  1422		case *ast.UnaryExpr:
  1423		case *ast.ArrayType:
  1424			if len, isEllipsis := t.Len.(*ast.Ellipsis); isEllipsis {
  1425				p.error(len.Pos(), "expected array length, found '...'")
  1426				x = &ast.BadExpr{From: x.Pos(), To: p.safePos(x.End())}
  1427			}
  1428		}
  1429	
  1430		// all other nodes are expressions or types
  1431		return x
  1432	}
  1433	
  1434	// If lhs is set and the result is an identifier, it is not resolved.
  1435	func (p *parser) parsePrimaryExpr(lhs bool) ast.Expr {
  1436		if p.trace {
  1437			defer un(trace(p, "PrimaryExpr"))
  1438		}
  1439	
  1440		x := p.parseOperand(lhs)
  1441	L:
  1442		for {
  1443			switch p.tok {
  1444			case token.PERIOD:
  1445				p.next()
  1446				if lhs {
  1447					p.resolve(x)
  1448				}
  1449				switch p.tok {
  1450				case token.IDENT:
  1451					x = p.parseSelector(p.checkExprOrType(x))
  1452				case token.LPAREN:
  1453					x = p.parseTypeAssertion(p.checkExpr(x))
  1454				default:
  1455					pos := p.pos
  1456					p.errorExpected(pos, "selector or type assertion")
  1457					p.next() // make progress
  1458					x = &ast.BadExpr{From: pos, To: p.pos}
  1459				}
  1460			case token.LBRACK:
  1461				if lhs {
  1462					p.resolve(x)
  1463				}
  1464				x = p.parseIndexOrSlice(p.checkExpr(x))
  1465			case token.LPAREN:
  1466				if lhs {
  1467					p.resolve(x)
  1468				}
  1469				x = p.parseCallOrConversion(p.checkExprOrType(x))
  1470			case token.LBRACE:
  1471				if isLiteralType(x) && (p.exprLev >= 0 || !isTypeName(x)) {
  1472					if lhs {
  1473						p.resolve(x)
  1474					}
  1475					x = p.parseLiteralValue(x)
  1476				} else {
  1477					break L
  1478				}
  1479			default:
  1480				break L
  1481			}
  1482			lhs = false // no need to try to resolve again
  1483		}
  1484	
  1485		return x
  1486	}
  1487	
  1488	// If lhs is set and the result is an identifier, it is not resolved.
  1489	func (p *parser) parseUnaryExpr(lhs bool) ast.Expr {
  1490		if p.trace {
  1491			defer un(trace(p, "UnaryExpr"))
  1492		}
  1493	
  1494		switch p.tok {
  1495		case token.ADD, token.SUB, token.NOT, token.XOR, token.AND:
  1496			pos, op := p.pos, p.tok
  1497			p.next()
  1498			x := p.parseUnaryExpr(false)
  1499			return &ast.UnaryExpr{OpPos: pos, Op: op, X: p.checkExpr(x)}
  1500	
  1501		case token.ARROW:
  1502			// channel type or receive expression
  1503			arrow := p.pos
  1504			p.next()
  1505	
  1506			// If the next token is token.CHAN we still don't know if it
  1507			// is a channel type or a receive operation - we only know
  1508			// once we have found the end of the unary expression. There
  1509			// are two cases:
  1510			//
  1511			//   <- type  => (<-type) must be channel type
  1512			//   <- expr  => <-(expr) is a receive from an expression
  1513			//
  1514			// In the first case, the arrow must be re-associated with
  1515			// the channel type parsed already:
  1516			//
  1517			//   <- (chan type)    =>  (<-chan type)
  1518			//   <- (chan<- type)  =>  (<-chan (<-type))
  1519	
  1520			x := p.parseUnaryExpr(false)
  1521	
  1522			// determine which case we have
  1523			if typ, ok := x.(*ast.ChanType); ok {
  1524				// (<-type)
  1525	
  1526				// re-associate position info and <-
  1527				dir := ast.SEND
  1528				for ok && dir == ast.SEND {
  1529					if typ.Dir == ast.RECV {
  1530						// error: (<-type) is (<-(<-chan T))
  1531						p.errorExpected(typ.Arrow, "'chan'")
  1532					}
  1533					arrow, typ.Begin, typ.Arrow = typ.Arrow, arrow, arrow
  1534					dir, typ.Dir = typ.Dir, ast.RECV
  1535					typ, ok = typ.Value.(*ast.ChanType)
  1536				}
  1537				if dir == ast.SEND {
  1538					p.errorExpected(arrow, "channel type")
  1539				}
  1540	
  1541				return x
  1542			}
  1543	
  1544			// <-(expr)
  1545			return &ast.UnaryExpr{OpPos: arrow, Op: token.ARROW, X: p.checkExpr(x)}
  1546	
  1547		case token.MUL:
  1548			// pointer type or unary "*" expression
  1549			pos := p.pos
  1550			p.next()
  1551			x := p.parseUnaryExpr(false)
  1552			return &ast.StarExpr{Star: pos, X: p.checkExprOrType(x)}
  1553		}
  1554	
  1555		return p.parsePrimaryExpr(lhs)
  1556	}
  1557	
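      	// tokPrec returns the current token and its precedence. In a rhs
      	// context an '=' (most likely a mistyped '==') is treated as '==' so
      	// that parsing can continue with a binary expression.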
  1558	func (p *parser) tokPrec() (token.Token, int) {
  1559		tok := p.tok
  1560		if p.inRhs && tok == token.ASSIGN {
  1561			tok = token.EQL
  1562		}
  1563		return tok, tok.Precedence()
  1564	}
  1565	
  1566	// If lhs is set and the result is an identifier, it is not resolved.
  1567	func (p *parser) parseBinaryExpr(lhs bool, prec1 int) ast.Expr {
  1568		if p.trace {
  1569			defer un(trace(p, "BinaryExpr"))
  1570		}
  1571	
  1572		x := p.parseUnaryExpr(lhs)
  1573		for _, prec := p.tokPrec(); prec >= prec1; prec-- {
  1574			for {
  1575				op, oprec := p.tokPrec()
  1576				if oprec != prec {
  1577					break
  1578				}
  1579				pos := p.expect(op)
  1580				if lhs {
  1581					p.resolve(x)
  1582					lhs = false
  1583				}
  1584				y := p.parseBinaryExpr(false, prec+1)
  1585				x = &ast.BinaryExpr{X: p.checkExpr(x), OpPos: pos, Op: op, Y: p.checkExpr(y)}
  1586			}
  1587		}
  1588	
  1589		return x
  1590	}
  1591	
  1592	// If lhs is set and the result is an identifier, it is not resolved.
  1593	// The result may be a type or even a raw type ([...]int). Callers must
  1594	// check the result (using checkExpr or checkExprOrType), depending on
  1595	// context.
  1596	func (p *parser) parseExpr(lhs bool) ast.Expr {
  1597		if p.trace {
  1598			defer un(trace(p, "Expression"))
  1599		}
  1600	
  1601		return p.parseBinaryExpr(lhs, token.LowestPrec+1)
  1602	}
  1603	
  1604	func (p *parser) parseRhs() ast.Expr {
  1605		old := p.inRhs
  1606		p.inRhs = true
  1607		x := p.checkExpr(p.parseExpr(false))
  1608		p.inRhs = old
  1609		return x
  1610	}
  1611	
  1612	func (p *parser) parseRhsOrType() ast.Expr {
  1613		old := p.inRhs
  1614		p.inRhs = true
  1615		x := p.checkExprOrType(p.parseExpr(false))
  1616		p.inRhs = old
  1617		return x
  1618	}
  1619	
  1620	// ----------------------------------------------------------------------------
  1621	// Statements
  1622	
  1623	// Parsing modes for parseSimpleStmt.
  1624	const (
  1625		basic = iota
  1626		labelOk
  1627		rangeOk
  1628	)
  1629	
  1630	// parseSimpleStmt returns true as 2nd result if it parsed the assignment
  1631	// of a range clause (with mode == rangeOk). The returned statement is an
  1632	// assignment with a right-hand side that is a single unary expression of
  1633	// the form "range x". No guarantees are given for the left-hand side.
  1634	func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) {
  1635		if p.trace {
  1636			defer un(trace(p, "SimpleStmt"))
  1637		}
  1638	
  1639		x := p.parseLhsList()
  1640	
  1641		switch p.tok {
  1642		case
  1643			token.DEFINE, token.ASSIGN, token.ADD_ASSIGN,
  1644			token.SUB_ASSIGN, token.MUL_ASSIGN, token.QUO_ASSIGN,
  1645			token.REM_ASSIGN, token.AND_ASSIGN, token.OR_ASSIGN,
  1646			token.XOR_ASSIGN, token.SHL_ASSIGN, token.SHR_ASSIGN, token.AND_NOT_ASSIGN:
  1647			// assignment statement, possibly part of a range clause
  1648			pos, tok := p.pos, p.tok
  1649			p.next()
  1650			var y []ast.Expr
  1651			isRange := false
  1652			if mode == rangeOk && p.tok == token.RANGE && (tok == token.DEFINE || tok == token.ASSIGN) {
  1653				pos := p.pos
  1654				p.next()
  1655				y = []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
  1656				isRange = true
  1657			} else {
  1658				y = p.parseRhsList()
  1659			}
  1660			as := &ast.AssignStmt{Lhs: x, TokPos: pos, Tok: tok, Rhs: y}
  1661			if tok == token.DEFINE {
  1662				p.shortVarDecl(as, x)
  1663			}
  1664			return as, isRange
  1665		}
  1666	
  1667		if len(x) > 1 {
  1668			p.errorExpected(x[0].Pos(), "1 expression")
  1669			// continue with first expression
  1670		}
  1671	
  1672		switch p.tok {
  1673		case token.COLON:
  1674			// labeled statement
  1675			colon := p.pos
  1676			p.next()
  1677			if label, isIdent := x[0].(*ast.Ident); mode == labelOk && isIdent {
  1678				// Go spec: The scope of a label is the body of the function
  1679				// in which it is declared and excludes the body of any nested
  1680				// function.
  1681				stmt := &ast.LabeledStmt{Label: label, Colon: colon, Stmt: p.parseStmt()}
  1682				p.declare(stmt, nil, p.labelScope, ast.Lbl, label)
  1683				return stmt, false
  1684			}
  1685			// The label declaration typically starts at x[0].Pos(), but the label
  1686			// declaration may be erroneous due to a token after that position (and
  1687			// before the ':'). If AllErrors is not set, the (only) error reported
  1688			// for the line is the illegal label error instead of the token before
  1689			// the ':' that caused the problem. Thus, use the (latest) colon position
  1690			// for error reporting.
  1691			p.error(colon, "illegal label declaration")
  1692			return &ast.BadStmt{From: x[0].Pos(), To: colon + 1}, false
  1693	
  1694		case token.ARROW:
  1695			// send statement
  1696			arrow := p.pos
  1697			p.next()
  1698			y := p.parseRhs()
  1699			return &ast.SendStmt{Chan: x[0], Arrow: arrow, Value: y}, false
  1700	
  1701		case token.INC, token.DEC:
  1702			// increment or decrement
  1703			s := &ast.IncDecStmt{X: x[0], TokPos: p.pos, Tok: p.tok}
  1704			p.next()
  1705			return s, false
  1706		}
  1707	
  1708		// expression
  1709		return &ast.ExprStmt{X: x[0]}, false
  1710	}
  1711	
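      	// parseCallExpr parses an expression that must be a call (as required by
      	// go and defer statements). If the expression is not a call, an error is
      	// reported (unless the expression is already bad) and nil is returned.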
  1712	func (p *parser) parseCallExpr(callType string) *ast.CallExpr {
  1713		x := p.parseRhsOrType() // could be a conversion: (some type)(x)
  1714		if call, isCall := x.(*ast.CallExpr); isCall {
  1715			return call
  1716		}
  1717		if _, isBad := x.(*ast.BadExpr); !isBad {
  1718			// only report error if it's a new one
  1719			p.error(p.safePos(x.End()), fmt.Sprintf("function must be invoked in %s statement", callType))
  1720		}
  1721		return nil
  1722	}
  1723	
  1724	func (p *parser) parseGoStmt() ast.Stmt {
  1725		if p.trace {
  1726			defer un(trace(p, "GoStmt"))
  1727		}
  1728	
  1729		pos := p.expect(token.GO)
  1730		call := p.parseCallExpr("go")
  1731		p.expectSemi()
  1732		if call == nil {
  1733			return &ast.BadStmt{From: pos, To: pos + 2} // len("go")
  1734		}
  1735	
  1736		return &ast.GoStmt{Go: pos, Call: call}
  1737	}
  1738	
  1739	func (p *parser) parseDeferStmt() ast.Stmt {
  1740		if p.trace {
  1741			defer un(trace(p, "DeferStmt"))
  1742		}
  1743	
  1744		pos := p.expect(token.DEFER)
  1745		call := p.parseCallExpr("defer")
  1746		p.expectSemi()
  1747		if call == nil {
  1748			return &ast.BadStmt{From: pos, To: pos + 5} // len("defer")
  1749		}
  1750	
  1751		return &ast.DeferStmt{Defer: pos, Call: call}
  1752	}
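
// A sketch, not part of the original file: parseCallExpr above accepts only
// call expressions, so the first two statements below parse cleanly while the
// third reports the "function must be invoked" error (worker and f are
// arbitrary names).
//
//	go worker(ch)
//	defer f.Close()
//	go worker // error: function must be invoked in go statement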
  1753	
  1754	func (p *parser) parseReturnStmt() *ast.ReturnStmt {
  1755		if p.trace {
  1756			defer un(trace(p, "ReturnStmt"))
  1757		}
  1758	
  1759		pos := p.pos
  1760		p.expect(token.RETURN)
  1761		var x []ast.Expr
  1762		if p.tok != token.SEMICOLON && p.tok != token.RBRACE {
  1763			x = p.parseRhsList()
  1764		}
  1765		p.expectSemi()
  1766	
  1767		return &ast.ReturnStmt{Return: pos, Results: x}
  1768	}
  1769	
  1770	func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt {
  1771		if p.trace {
  1772			defer un(trace(p, "BranchStmt"))
  1773		}
  1774	
  1775		pos := p.expect(tok)
  1776		var label *ast.Ident
  1777		if tok != token.FALLTHROUGH && p.tok == token.IDENT {
  1778			label = p.parseIdent()
  1779			// add to list of unresolved targets
  1780			n := len(p.targetStack) - 1
  1781			p.targetStack[n] = append(p.targetStack[n], label)
  1782		}
  1783		p.expectSemi()
  1784	
  1785		return &ast.BranchStmt{TokPos: pos, Tok: tok, Label: label}
  1786	}
  1787	
  1788	func (p *parser) makeExpr(s ast.Stmt, kind string) ast.Expr {
  1789		if s == nil {
  1790			return nil
  1791		}
  1792		if es, isExpr := s.(*ast.ExprStmt); isExpr {
  1793			return p.checkExpr(es.X)
  1794		}
  1795		p.error(s.Pos(), fmt.Sprintf("expected %s, found simple statement (missing parentheses around composite literal?)", kind))
  1796		return &ast.BadExpr{From: s.Pos(), To: p.safePos(s.End())}
  1797	}
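
// A sketch, not part of the original file: with exprLev < 0 a composite
// literal is not recognized at the top level of an if/for/switch header, so
// the second line below yields the error above, while parenthesizing the
// literal (first line) parses as intended (T, v, x are arbitrary names).
//
//	if v := (T{1, 2}); v == x { ... } // ok: parentheses re-enable the literal
//	if v := T{1, 2}; v == x { ... }   // error: expected boolean expression, found simple statement
//	                                  //        (missing parentheses around composite literal?)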
  1798	
  1799	func (p *parser) parseIfStmt() *ast.IfStmt {
  1800		if p.trace {
  1801			defer un(trace(p, "IfStmt"))
  1802		}
  1803	
  1804		pos := p.expect(token.IF)
  1805		p.openScope()
  1806		defer p.closeScope()
  1807	
  1808		var s ast.Stmt
  1809		var x ast.Expr
  1810		{
  1811			prevLev := p.exprLev
  1812			p.exprLev = -1
  1813			if p.tok == token.SEMICOLON {
  1814				p.next()
  1815				x = p.parseRhs()
  1816			} else {
  1817				s, _ = p.parseSimpleStmt(basic)
  1818				if p.tok == token.SEMICOLON {
  1819					p.next()
  1820					x = p.parseRhs()
  1821				} else {
  1822					x = p.makeExpr(s, "boolean expression")
  1823					s = nil
  1824				}
  1825			}
  1826			p.exprLev = prevLev
  1827		}
  1828	
  1829		body := p.parseBlockStmt()
  1830		var else_ ast.Stmt
  1831		if p.tok == token.ELSE {
  1832			p.next()
  1833			else_ = p.parseStmt()
  1834		} else {
  1835			p.expectSemi()
  1836		}
  1837	
  1838		return &ast.IfStmt{If: pos, Init: s, Cond: x, Body: body, Else: else_}
  1839	}
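
// A sketch, not part of the original file: if statement header forms accepted
// above (names are arbitrary).
//
//	if x { ... }                       // condition only
//	if s := f(); x { ... }             // init statement and condition
//	if ; x { ... }                     // empty init statement is tolerated; the printer drops it
//	if x { ... } else if y { ... } else { ... }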
  1840	
  1841	func (p *parser) parseTypeList() (list []ast.Expr) {
  1842		if p.trace {
  1843			defer un(trace(p, "TypeList"))
  1844		}
  1845	
  1846		list = append(list, p.parseType())
  1847		for p.tok == token.COMMA {
  1848			p.next()
  1849			list = append(list, p.parseType())
  1850		}
  1851	
  1852		return
  1853	}
  1854	
  1855	func (p *parser) parseCaseClause(typeSwitch bool) *ast.CaseClause {
  1856		if p.trace {
  1857			defer un(trace(p, "CaseClause"))
  1858		}
  1859	
  1860		pos := p.pos
  1861		var list []ast.Expr
  1862		if p.tok == token.CASE {
  1863			p.next()
  1864			if typeSwitch {
  1865				list = p.parseTypeList()
  1866			} else {
  1867				list = p.parseRhsList()
  1868			}
  1869		} else {
  1870			p.expect(token.DEFAULT)
  1871		}
  1872	
  1873		colon := p.expect(token.COLON)
  1874		p.openScope()
  1875		body := p.parseStmtList()
  1876		p.closeScope()
  1877	
  1878		return &ast.CaseClause{Case: pos, List: list, Colon: colon, Body: body}
  1879	}
  1880	
  1881	func isTypeSwitchAssert(x ast.Expr) bool {
  1882		a, ok := x.(*ast.TypeAssertExpr)
  1883		return ok && a.Type == nil
  1884	}
  1885	
  1886	func isTypeSwitchGuard(s ast.Stmt) bool {
  1887		switch t := s.(type) {
  1888		case *ast.ExprStmt:
   1889		// x.(type)
  1890			return isTypeSwitchAssert(t.X)
  1891		case *ast.AssignStmt:
   1892		// v := x.(type)
  1893			return len(t.Lhs) == 1 && t.Tok == token.DEFINE && len(t.Rhs) == 1 && isTypeSwitchAssert(t.Rhs[0])
  1894		}
  1895		return false
  1896	}
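
// A sketch, not part of the original file: the two source forms recognized as
// type switch guards (v and x are arbitrary names). In the AST the guard is a
// *ast.TypeAssertExpr whose Type field is nil, which is what
// isTypeSwitchAssert checks.
//
//	switch x.(type) { ... }
//	switch v := x.(type) { ... }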
  1897	
  1898	func (p *parser) parseSwitchStmt() ast.Stmt {
  1899		if p.trace {
  1900			defer un(trace(p, "SwitchStmt"))
  1901		}
  1902	
  1903		pos := p.expect(token.SWITCH)
  1904		p.openScope()
  1905		defer p.closeScope()
  1906	
  1907		var s1, s2 ast.Stmt
  1908		if p.tok != token.LBRACE {
  1909			prevLev := p.exprLev
  1910			p.exprLev = -1
  1911			if p.tok != token.SEMICOLON {
  1912				s2, _ = p.parseSimpleStmt(basic)
  1913			}
  1914			if p.tok == token.SEMICOLON {
  1915				p.next()
  1916				s1 = s2
  1917				s2 = nil
  1918				if p.tok != token.LBRACE {
  1919					// A TypeSwitchGuard may declare a variable in addition
  1920					// to the variable declared in the initial SimpleStmt.
  1921					// Introduce extra scope to avoid redeclaration errors:
  1922					//
  1923					//	switch t := 0; t := x.(T) { ... }
  1924					//
  1925					// (this code is not valid Go because the first t
  1926					// cannot be accessed and thus is never used, the extra
  1927					// scope is needed for the correct error message).
  1928					//
  1929					// If we don't have a type switch, s2 must be an expression.
  1930					// Having the extra nested but empty scope won't affect it.
  1931					p.openScope()
  1932					defer p.closeScope()
  1933					s2, _ = p.parseSimpleStmt(basic)
  1934				}
  1935			}
  1936			p.exprLev = prevLev
  1937		}
  1938	
  1939		typeSwitch := isTypeSwitchGuard(s2)
  1940		lbrace := p.expect(token.LBRACE)
  1941		var list []ast.Stmt
  1942		for p.tok == token.CASE || p.tok == token.DEFAULT {
  1943			list = append(list, p.parseCaseClause(typeSwitch))
  1944		}
  1945		rbrace := p.expect(token.RBRACE)
  1946		p.expectSemi()
  1947		body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
  1948	
  1949		if typeSwitch {
  1950			return &ast.TypeSwitchStmt{Switch: pos, Init: s1, Assign: s2, Body: body}
  1951		}
  1952	
  1953		return &ast.SwitchStmt{Switch: pos, Init: s1, Tag: p.makeExpr(s2, "switch expression"), Body: body}
  1954	}
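
// A sketch, not part of the original file: switch header forms and how they
// map onto s1 (init) and s2 (tag or guard) above (names are arbitrary).
//
//	switch { ... }                // no init, no tag
//	switch x { ... }              // s2 = tag expression
//	switch x := f(); x { ... }    // s1 = init statement, s2 = tag expression
//	switch v := x.(type) { ... }  // s2 = type switch guard -> *ast.TypeSwitchStmt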
  1955	
  1956	func (p *parser) parseCommClause() *ast.CommClause {
  1957		if p.trace {
  1958			defer un(trace(p, "CommClause"))
  1959		}
  1960	
  1961		p.openScope()
  1962		pos := p.pos
  1963		var comm ast.Stmt
  1964		if p.tok == token.CASE {
  1965			p.next()
  1966			lhs := p.parseLhsList()
  1967			if p.tok == token.ARROW {
  1968				// SendStmt
  1969				if len(lhs) > 1 {
  1970					p.errorExpected(lhs[0].Pos(), "1 expression")
  1971					// continue with first expression
  1972				}
  1973				arrow := p.pos
  1974				p.next()
  1975				rhs := p.parseRhs()
  1976				comm = &ast.SendStmt{Chan: lhs[0], Arrow: arrow, Value: rhs}
  1977			} else {
  1978				// RecvStmt
  1979				if tok := p.tok; tok == token.ASSIGN || tok == token.DEFINE {
  1980					// RecvStmt with assignment
  1981					if len(lhs) > 2 {
  1982						p.errorExpected(lhs[0].Pos(), "1 or 2 expressions")
  1983						// continue with first two expressions
  1984						lhs = lhs[0:2]
  1985					}
  1986					pos := p.pos
  1987					p.next()
  1988					rhs := p.parseRhs()
  1989					as := &ast.AssignStmt{Lhs: lhs, TokPos: pos, Tok: tok, Rhs: []ast.Expr{rhs}}
  1990					if tok == token.DEFINE {
  1991						p.shortVarDecl(as, lhs)
  1992					}
  1993					comm = as
  1994				} else {
  1995					// lhs must be single receive operation
  1996					if len(lhs) > 1 {
  1997						p.errorExpected(lhs[0].Pos(), "1 expression")
  1998						// continue with first expression
  1999					}
  2000					comm = &ast.ExprStmt{X: lhs[0]}
  2001				}
  2002			}
  2003		} else {
  2004			p.expect(token.DEFAULT)
  2005		}
  2006	
  2007		colon := p.expect(token.COLON)
  2008		body := p.parseStmtList()
  2009		p.closeScope()
  2010	
  2011		return &ast.CommClause{Case: pos, Comm: comm, Colon: colon, Body: body}
  2012	}
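
// A sketch, not part of the original file: the communication clauses handled
// above (ch, v, ok are arbitrary names).
//
//	case ch <- v:       // SendStmt
//	case v = <-ch:      // RecvStmt with assignment
//	case v, ok := <-ch: // RecvStmt with short variable declaration
//	case <-ch:          // bare receive (expression statement)
//	default: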
  2013	
  2014	func (p *parser) parseSelectStmt() *ast.SelectStmt {
  2015		if p.trace {
  2016			defer un(trace(p, "SelectStmt"))
  2017		}
  2018	
  2019		pos := p.expect(token.SELECT)
  2020		lbrace := p.expect(token.LBRACE)
  2021		var list []ast.Stmt
  2022		for p.tok == token.CASE || p.tok == token.DEFAULT {
  2023			list = append(list, p.parseCommClause())
  2024		}
  2025		rbrace := p.expect(token.RBRACE)
  2026		p.expectSemi()
  2027		body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
  2028	
  2029		return &ast.SelectStmt{Select: pos, Body: body}
  2030	}
  2031	
  2032	func (p *parser) parseForStmt() ast.Stmt {
  2033		if p.trace {
  2034			defer un(trace(p, "ForStmt"))
  2035		}
  2036	
  2037		pos := p.expect(token.FOR)
  2038		p.openScope()
  2039		defer p.closeScope()
  2040	
  2041		var s1, s2, s3 ast.Stmt
  2042		var isRange bool
  2043		if p.tok != token.LBRACE {
  2044			prevLev := p.exprLev
  2045			p.exprLev = -1
  2046			if p.tok != token.SEMICOLON {
  2047				if p.tok == token.RANGE {
  2048					// "for range x" (nil lhs in assignment)
  2049					pos := p.pos
  2050					p.next()
  2051					y := []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
  2052					s2 = &ast.AssignStmt{Rhs: y}
  2053					isRange = true
  2054				} else {
  2055					s2, isRange = p.parseSimpleStmt(rangeOk)
  2056				}
  2057			}
  2058			if !isRange && p.tok == token.SEMICOLON {
  2059				p.next()
  2060				s1 = s2
  2061				s2 = nil
  2062				if p.tok != token.SEMICOLON {
  2063					s2, _ = p.parseSimpleStmt(basic)
  2064				}
  2065				p.expectSemi()
  2066				if p.tok != token.LBRACE {
  2067					s3, _ = p.parseSimpleStmt(basic)
  2068				}
  2069			}
  2070			p.exprLev = prevLev
  2071		}
  2072	
  2073		body := p.parseBlockStmt()
  2074		p.expectSemi()
  2075	
  2076		if isRange {
  2077			as := s2.(*ast.AssignStmt)
  2078			// check lhs
  2079			var key, value ast.Expr
  2080			switch len(as.Lhs) {
  2081			case 0:
  2082				// nothing to do
  2083			case 1:
  2084				key = as.Lhs[0]
  2085			case 2:
  2086				key, value = as.Lhs[0], as.Lhs[1]
  2087			default:
  2088				p.errorExpected(as.Lhs[len(as.Lhs)-1].Pos(), "at most 2 expressions")
  2089				return &ast.BadStmt{From: pos, To: p.safePos(body.End())}
  2090			}
  2091			// parseSimpleStmt returned a right-hand side that
  2092			// is a single unary expression of the form "range x"
  2093			x := as.Rhs[0].(*ast.UnaryExpr).X
  2094			return &ast.RangeStmt{
  2095				For:    pos,
  2096				Key:    key,
  2097				Value:  value,
  2098				TokPos: as.TokPos,
  2099				Tok:    as.Tok,
  2100				X:      x,
  2101				Body:   body,
  2102			}
  2103		}
  2104	
  2105		// regular for statement
  2106		return &ast.ForStmt{
  2107			For:  pos,
  2108			Init: s1,
  2109			Cond: p.makeExpr(s2, "boolean or range expression"),
  2110			Post: s3,
  2111			Body: body,
  2112		}
  2113	}
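
// A sketch, not part of the original file: for statement header forms handled
// above and the node each produces (names are arbitrary).
//
//	for { ... }                    // *ast.ForStmt, no clauses
//	for x < n { ... }              // *ast.ForStmt, condition only
//	for i := 0; i < n; i++ { ... } // *ast.ForStmt with init, condition, post
//	for range ch { ... }           // *ast.RangeStmt, nil Key and Value
//	for k := range m { ... }       // *ast.RangeStmt, Key only
//	for k, v := range m { ... }    // *ast.RangeStmt, Key and Value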
  2114	
  2115	func (p *parser) parseStmt() (s ast.Stmt) {
  2116		if p.trace {
  2117			defer un(trace(p, "Statement"))
  2118		}
  2119	
  2120		switch p.tok {
  2121		case token.CONST, token.TYPE, token.VAR:
  2122			s = &ast.DeclStmt{Decl: p.parseDecl(syncStmt)}
  2123		case
  2124			// tokens that may start an expression
  2125			token.IDENT, token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING, token.FUNC, token.LPAREN, // operands
  2126			token.LBRACK, token.STRUCT, // composite types
  2127			token.ADD, token.SUB, token.MUL, token.AND, token.XOR, token.ARROW, token.NOT: // unary operators
  2128			s, _ = p.parseSimpleStmt(labelOk)
  2129			// because of the required look-ahead, labeled statements are
  2130			// parsed by parseSimpleStmt - don't expect a semicolon after
  2131			// them
  2132			if _, isLabeledStmt := s.(*ast.LabeledStmt); !isLabeledStmt {
  2133				p.expectSemi()
  2134			}
  2135		case token.GO:
  2136			s = p.parseGoStmt()
  2137		case token.DEFER:
  2138			s = p.parseDeferStmt()
  2139		case token.RETURN:
  2140			s = p.parseReturnStmt()
  2141		case token.BREAK, token.CONTINUE, token.GOTO, token.FALLTHROUGH:
  2142			s = p.parseBranchStmt(p.tok)
  2143		case token.LBRACE:
  2144			s = p.parseBlockStmt()
  2145			p.expectSemi()
  2146		case token.IF:
  2147			s = p.parseIfStmt()
  2148		case token.SWITCH:
  2149			s = p.parseSwitchStmt()
  2150		case token.SELECT:
  2151			s = p.parseSelectStmt()
  2152		case token.FOR:
  2153			s = p.parseForStmt()
  2154		case token.SEMICOLON:
  2155			s = &ast.EmptyStmt{Semicolon: p.pos}
  2156			p.next()
  2157		case token.RBRACE:
  2158			// a semicolon may be omitted before a closing "}"
  2159			s = &ast.EmptyStmt{Semicolon: p.pos}
  2160		default:
  2161			// no statement found
  2162			pos := p.pos
  2163			p.errorExpected(pos, "statement")
  2164			syncStmt(p)
  2165			s = &ast.BadStmt{From: pos, To: p.pos}
  2166		}
  2167	
  2168		return
  2169	}
  2170	
  2171	// ----------------------------------------------------------------------------
  2172	// Declarations
  2173	
  2174	type parseSpecFunction func(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec
  2175	
  2176	func isValidImport(lit string) bool {
  2177		const illegalChars = `!"#$%&'()*,:;<=>?[\]^{|}` + "`\uFFFD"
  2178		s, _ := strconv.Unquote(lit) // go/scanner returns a legal string literal
  2179		for _, r := range s {
  2180			if !unicode.IsGraphic(r) || unicode.IsSpace(r) || strings.ContainsRune(illegalChars, r) {
  2181				return false
  2182			}
  2183		}
  2184		return s != ""
  2185	}
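
// A sketch, not part of the original file: a few inputs and the result
// isValidImport would report for them (the literals include the surrounding
// quotes, as delivered by the scanner).
//
//	isValidImport(`"fmt"`)          // true
//	isValidImport(`"golang.org/x"`) // true
//	isValidImport(`""`)             // false: empty path
//	isValidImport(`"a b"`)          // false: contains a space
//	isValidImport(`"a;b"`)          // false: ';' is in illegalChars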
  2186	
  2187	func (p *parser) parseImportSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
  2188		if p.trace {
  2189			defer un(trace(p, "ImportSpec"))
  2190		}
  2191	
  2192		var ident *ast.Ident
  2193		switch p.tok {
  2194		case token.PERIOD:
  2195			ident = &ast.Ident{NamePos: p.pos, Name: "."}
  2196			p.next()
  2197		case token.IDENT:
  2198			ident = p.parseIdent()
  2199		}
  2200	
  2201		pos := p.pos
  2202		var path string
  2203		if p.tok == token.STRING {
  2204			path = p.lit
  2205			if !isValidImport(path) {
  2206				p.error(pos, "invalid import path: "+path)
  2207			}
  2208			p.next()
  2209		} else {
  2210			p.expect(token.STRING) // use expect() error handling
  2211		}
   2212	p.expectSemi() // call before accessing p.lineComment
  2213	
  2214		// collect imports
  2215		spec := &ast.ImportSpec{
  2216			Doc:     doc,
  2217			Name:    ident,
  2218			Path:    &ast.BasicLit{ValuePos: pos, Kind: token.STRING, Value: path},
  2219			Comment: p.lineComment,
  2220		}
  2221		p.imports = append(p.imports, spec)
  2222	
  2223		return spec
  2224	}
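
// A sketch, not part of the original file: ImportSpec forms handled above
// (the path "lib/math" is a placeholder).
//
//	import   "lib/math" // no local name
//	import m "lib/math" // renamed import (token.IDENT)
//	import . "lib/math" // dot import (token.PERIOD)
//	import _ "lib/math" // blank import; "_" is parsed as an ordinary identifier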
  2225	
  2226	func (p *parser) parseValueSpec(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec {
  2227		if p.trace {
  2228			defer un(trace(p, keyword.String()+"Spec"))
  2229		}
  2230	
  2231		idents := p.parseIdentList()
  2232		typ := p.tryType()
  2233		var values []ast.Expr
  2234		// always permit optional initialization for more tolerant parsing
  2235		if p.tok == token.ASSIGN {
  2236			p.next()
  2237			values = p.parseRhsList()
  2238		}
   2239		p.expectSemi() // call before accessing p.lineComment
  2240	
  2241		// Go spec: The scope of a constant or variable identifier declared inside
  2242		// a function begins at the end of the ConstSpec or VarSpec and ends at
  2243		// the end of the innermost containing block.
  2244		// (Global identifiers are resolved in a separate phase after parsing.)
  2245		spec := &ast.ValueSpec{
  2246			Doc:     doc,
  2247			Names:   idents,
  2248			Type:    typ,
  2249			Values:  values,
  2250			Comment: p.lineComment,
  2251		}
  2252		kind := ast.Con
  2253		if keyword == token.VAR {
  2254			kind = ast.Var
  2255		}
  2256		p.declare(spec, iota, p.topScope, kind, idents...)
  2257	
  2258		return spec
  2259	}
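
// A sketch, not part of the original file: ValueSpec forms accepted above.
// The parser permits omitting the type, the values, or both, and leaves
// stricter checks to later phases (names are arbitrary).
//
//	var x int               // type, no values
//	var y = 1               // values, no type
//	var u, v float64 = 1, 2 // names, type, and values
//	const (
//		a = iota // first spec (iota == 0)
//		b        // values omitted; the implicit repetition is applied later
//	)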
  2260	
  2261	func (p *parser) parseTypeSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
  2262		if p.trace {
  2263			defer un(trace(p, "TypeSpec"))
  2264		}
  2265	
  2266		ident := p.parseIdent()
  2267	
  2268		// Go spec: The scope of a type identifier declared inside a function begins
  2269		// at the identifier in the TypeSpec and ends at the end of the innermost
  2270		// containing block.
  2271		// (Global identifiers are resolved in a separate phase after parsing.)
  2272		spec := &ast.TypeSpec{Doc: doc, Name: ident}
  2273		p.declare(spec, nil, p.topScope, ast.Typ, ident)
  2274	
  2275		spec.Type = p.parseType()
   2276		p.expectSemi() // call before accessing p.lineComment
  2277		spec.Comment = p.lineComment
  2278	
  2279		return spec
  2280	}
  2281	
  2282	func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
  2283		if p.trace {
  2284			defer un(trace(p, "GenDecl("+keyword.String()+")"))
  2285		}
  2286	
  2287		doc := p.leadComment
  2288		pos := p.expect(keyword)
  2289		var lparen, rparen token.Pos
  2290		var list []ast.Spec
  2291		if p.tok == token.LPAREN {
  2292			lparen = p.pos
  2293			p.next()
  2294			for iota := 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++ {
  2295				list = append(list, f(p.leadComment, keyword, iota))
  2296			}
  2297			rparen = p.expect(token.RPAREN)
  2298			p.expectSemi()
  2299		} else {
  2300			list = append(list, f(nil, keyword, 0))
  2301		}
  2302	
  2303		return &ast.GenDecl{
  2304			Doc:    doc,
  2305			TokPos: pos,
  2306			Tok:    keyword,
  2307			Lparen: lparen,
  2308			Specs:  list,
  2309			Rparen: rparen,
  2310		}
  2311	}
  2312	
  2313	func (p *parser) parseFuncDecl() *ast.FuncDecl {
  2314		if p.trace {
  2315			defer un(trace(p, "FunctionDecl"))
  2316		}
  2317	
  2318		doc := p.leadComment
  2319		pos := p.expect(token.FUNC)
  2320		scope := ast.NewScope(p.topScope) // function scope
  2321	
  2322		var recv *ast.FieldList
  2323		if p.tok == token.LPAREN {
  2324			recv = p.parseParameters(scope, false)
  2325		}
  2326	
  2327		ident := p.parseIdent()
  2328	
  2329		params, results := p.parseSignature(scope)
  2330	
  2331		var body *ast.BlockStmt
  2332		if p.tok == token.LBRACE {
  2333			body = p.parseBody(scope)
  2334		}
  2335		p.expectSemi()
  2336	
  2337		decl := &ast.FuncDecl{
  2338			Doc:  doc,
  2339			Recv: recv,
  2340			Name: ident,
  2341			Type: &ast.FuncType{
  2342				Func:    pos,
  2343				Params:  params,
  2344				Results: results,
  2345			},
  2346			Body: body,
  2347		}
  2348		if recv == nil {
  2349			// Go spec: The scope of an identifier denoting a constant, type,
  2350			// variable, or function (but not method) declared at top level
  2351			// (outside any function) is the package block.
  2352			//
  2353			// init() functions cannot be referred to and there may
  2354			// be more than one - don't put them in the pkgScope
  2355			if ident.Name != "init" {
  2356				p.declare(decl, nil, p.pkgScope, ast.Fun, ident)
  2357			}
  2358		}
  2359	
  2360		return decl
  2361	}
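
// A sketch, not part of the original file: function declaration forms handled
// above (names are arbitrary).
//
//	func f(x int) int { ... } // function; declared in pkgScope
//	func (r *T) m() { ... }   // method; receiver parsed first, not added to pkgScope
//	func init() { ... }       // init functions are never added to pkgScope
//	func g()                  // body omitted (e.g. implemented in assembly)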
  2362	
  2363	func (p *parser) parseDecl(sync func(*parser)) ast.Decl {
  2364		if p.trace {
  2365			defer un(trace(p, "Declaration"))
  2366		}
  2367	
  2368		var f parseSpecFunction
  2369		switch p.tok {
  2370		case token.CONST, token.VAR:
  2371			f = p.parseValueSpec
  2372	
  2373		case token.TYPE:
  2374			f = p.parseTypeSpec
  2375	
  2376		case token.FUNC:
  2377			return p.parseFuncDecl()
  2378	
  2379		default:
  2380			pos := p.pos
  2381			p.errorExpected(pos, "declaration")
  2382			sync(p)
  2383			return &ast.BadDecl{From: pos, To: p.pos}
  2384		}
  2385	
  2386		return p.parseGenDecl(p.tok, f)
  2387	}
  2388	
  2389	// ----------------------------------------------------------------------------
  2390	// Source files
  2391	
  2392	func (p *parser) parseFile() *ast.File {
  2393		if p.trace {
  2394			defer un(trace(p, "File"))
  2395		}
  2396	
  2397		// Don't bother parsing the rest if we had errors scanning the first token.
  2398		// Likely not a Go source file at all.
  2399		if p.errors.Len() != 0 {
  2400			return nil
  2401		}
  2402	
  2403		// package clause
  2404		doc := p.leadComment
  2405		pos := p.expect(token.PACKAGE)
  2406		// Go spec: The package clause is not a declaration;
  2407		// the package name does not appear in any scope.
  2408		ident := p.parseIdent()
  2409		if ident.Name == "_" && p.mode&DeclarationErrors != 0 {
  2410			p.error(p.pos, "invalid package name _")
  2411		}
  2412		p.expectSemi()
  2413	
  2414		// Don't bother parsing the rest if we had errors parsing the package clause.
  2415		// Likely not a Go source file at all.
  2416		if p.errors.Len() != 0 {
  2417			return nil
  2418		}
  2419	
  2420		p.openScope()
  2421		p.pkgScope = p.topScope
  2422		var decls []ast.Decl
  2423		if p.mode&PackageClauseOnly == 0 {
  2424			// import decls
  2425			for p.tok == token.IMPORT {
  2426				decls = append(decls, p.parseGenDecl(token.IMPORT, p.parseImportSpec))
  2427			}
  2428	
  2429			if p.mode&ImportsOnly == 0 {
  2430				// rest of package body
  2431				for p.tok != token.EOF {
  2432					decls = append(decls, p.parseDecl(syncDecl))
  2433				}
  2434			}
  2435		}
  2436		p.closeScope()
  2437		assert(p.topScope == nil, "unbalanced scopes")
  2438		assert(p.labelScope == nil, "unbalanced label scopes")
  2439	
  2440		// resolve global identifiers within the same file
  2441		i := 0
  2442		for _, ident := range p.unresolved {
  2443			// i <= index for current ident
  2444			assert(ident.Obj == unresolved, "object already resolved")
  2445			ident.Obj = p.pkgScope.Lookup(ident.Name) // also removes unresolved sentinel
  2446			if ident.Obj == nil {
  2447				p.unresolved[i] = ident
  2448				i++
  2449			}
  2450		}
  2451	
  2452		return &ast.File{
  2453			Doc:        doc,
  2454			Package:    pos,
  2455			Name:       ident,
  2456			Decls:      decls,
  2457			Scope:      p.pkgScope,
  2458			Imports:    p.imports,
  2459			Unresolved: p.unresolved[0:i],
  2460			Comments:   p.comments,
  2461		}
  2462	}
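
// A sketch, not part of the original file: typical use of this parser through
// the package's exported entry points (src holds the source text; error
// handling elided).
//
//	fset := token.NewFileSet()
//	f, err := parser.ParseFile(fset, "hello.go", src, parser.ParseComments)
//	if err != nil { ... }
//	ast.Print(fset, f)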
  2463	
