Diffstat (limited to 'src/pkg/go/printer/testdata/parser.go')
-rw-r--r--  src/pkg/go/printer/testdata/parser.go  2153
1 file changed, 0 insertions(+), 2153 deletions(-)
diff --git a/src/pkg/go/printer/testdata/parser.go b/src/pkg/go/printer/testdata/parser.go
deleted file mode 100644
index dba8bbd43..000000000
--- a/src/pkg/go/printer/testdata/parser.go
+++ /dev/null
@@ -1,2153 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package parser implements a parser for Go source files. Input may be
-// provided in a variety of forms (see the various Parse* functions); the
-// output is an abstract syntax tree (AST) representing the Go source. The
-// parser is invoked through one of the Parse* functions.
-
-package parser
-
-import (
- "fmt"
- "go/ast"
- "go/scanner"
- "go/token"
-)
-
-// The mode parameter to the Parse* functions is a set of flags (or 0).
-// They control the amount of source code parsed and other optional
-// parser functionality.
-//
-const (
- PackageClauseOnly uint = 1 << iota // parsing stops after package clause
- ImportsOnly // parsing stops after import declarations
- ParseComments // parse comments and add them to AST
- Trace // print a trace of parsed productions
- DeclarationErrors // report declaration errors
-)
-
-// The parser structure holds the parser's internal state.
-type parser struct {
- file *token.File
- scanner.ErrorVector
- scanner scanner.Scanner
-
- // Tracing/debugging
- mode uint // parsing mode
- trace bool // == (mode & Trace != 0)
- indent uint // indentation used for tracing output
-
- // Comments
- comments []*ast.CommentGroup
- leadComment *ast.CommentGroup // last lead comment
- lineComment *ast.CommentGroup // last line comment
-
- // Next token
- pos token.Pos // token position
- tok token.Token // one token look-ahead
- lit string // token literal
-
- // Non-syntactic parser control
- exprLev int // < 0: in control clause, >= 0: in expression
-
- // Ordinary identifier scopes
- pkgScope *ast.Scope // pkgScope.Outer == nil
- topScope *ast.Scope // top-most scope; may be pkgScope
- unresolved []*ast.Ident // unresolved identifiers
- imports []*ast.ImportSpec // list of imports
-
- // Label scope
- // (maintained by open/close LabelScope)
- labelScope *ast.Scope // label scope for current function
- targetStack [][]*ast.Ident // stack of unresolved labels
-}
-
-// scannerMode returns the scanner mode bits given the parser's mode bits.
-func scannerMode(mode uint) uint {
- var m uint = scanner.InsertSemis
- if mode&ParseComments != 0 {
- m |= scanner.ScanComments
- }
- return m
-}
-
-func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode uint) {
- p.file = fset.AddFile(filename, fset.Base(), len(src))
- p.scanner.Init(p.file, src, p, scannerMode(mode))
-
- p.mode = mode
- p.trace = mode&Trace != 0 // for convenience (p.trace is used frequently)
-
- p.next()
-
- // set up the pkgScope here (as opposed to in parseFile) because
- // there are other parser entry points (ParseExpr, etc.)
- p.openScope()
- p.pkgScope = p.topScope
-
- // for the same reason, set up a label scope
- p.openLabelScope()
-}
-
-// ----------------------------------------------------------------------------
-// Scoping support
-
-func (p *parser) openScope() {
- p.topScope = ast.NewScope(p.topScope)
-}
-
-func (p *parser) closeScope() {
- p.topScope = p.topScope.Outer
-}
-
-func (p *parser) openLabelScope() {
- p.labelScope = ast.NewScope(p.labelScope)
- p.targetStack = append(p.targetStack, nil)
-}
-
-func (p *parser) closeLabelScope() {
- // resolve labels
- n := len(p.targetStack) - 1
- scope := p.labelScope
- for _, ident := range p.targetStack[n] {
- ident.Obj = scope.Lookup(ident.Name)
- if ident.Obj == nil && p.mode&DeclarationErrors != 0 {
- p.error(ident.Pos(), fmt.Sprintf("label %s undefined", ident.Name))
- }
- }
- // pop label scope
- p.targetStack = p.targetStack[0:n]
- p.labelScope = p.labelScope.Outer
-}
-
-func (p *parser) declare(decl interface{}, scope *ast.Scope, kind ast.ObjKind, idents ...*ast.Ident) {
- for _, ident := range idents {
- assert(ident.Obj == nil, "identifier already declared or resolved")
- if ident.Name != "_" {
- obj := ast.NewObj(kind, ident.Name)
- // remember the corresponding declaration for redeclaration
- // errors and global variable resolution/typechecking phase
- obj.Decl = decl
- if alt := scope.Insert(obj); alt != nil && p.mode&DeclarationErrors != 0 {
- prevDecl := ""
- if pos := alt.Pos(); pos.IsValid() {
- prevDecl = fmt.Sprintf("\n\tprevious declaration at %s", p.file.Position(pos))
- }
- p.error(ident.Pos(), fmt.Sprintf("%s redeclared in this block%s", ident.Name, prevDecl))
- }
- ident.Obj = obj
- }
- }
-}
-
-func (p *parser) shortVarDecl(idents []*ast.Ident) {
- // Go spec: A short variable declaration may redeclare variables
- // provided they were originally declared in the same block with
- // the same type, and at least one of the non-blank variables is new.
- n := 0 // number of new variables
- for _, ident := range idents {
- assert(ident.Obj == nil, "identifier already declared or resolved")
- if ident.Name != "_" {
- obj := ast.NewObj(ast.Var, ident.Name)
- // short var declarations cannot have redeclaration errors
- // and are not global => no need to remember the respective
- // declaration
- alt := p.topScope.Insert(obj)
- if alt == nil {
- n++ // new declaration
- alt = obj
- }
- ident.Obj = alt
- }
- }
- if n == 0 && p.mode&DeclarationErrors != 0 {
- p.error(idents[0].Pos(), "no new variables on left side of :=")
- }
-}
-
-// The unresolved object is a sentinel to mark identifiers that have been added
-// to the list of unresolved identifiers. The sentinel is only used for verifying
-// internal consistency.
-var unresolved = new(ast.Object)
-
-func (p *parser) resolve(x ast.Expr) {
- // nothing to do if x is not an identifier or the blank identifier
- ident, _ := x.(*ast.Ident)
- if ident == nil {
- return
- }
- assert(ident.Obj == nil, "identifier already declared or resolved")
- if ident.Name == "_" {
- return
- }
- // try to resolve the identifier
- for s := p.topScope; s != nil; s = s.Outer {
- if obj := s.Lookup(ident.Name); obj != nil {
- ident.Obj = obj
- return
- }
- }
- // all local scopes are known, so any unresolved identifier
- // must be found either in the file scope, package scope
- // (perhaps in another file), or universe scope --- collect
- // them so that they can be resolved later
- ident.Obj = unresolved
- p.unresolved = append(p.unresolved, ident)
-}
-
-// ----------------------------------------------------------------------------
-// Parsing support
-
-func (p *parser) printTrace(a ...interface{}) {
- const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . " +
- ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
- const n = uint(len(dots))
- pos := p.file.Position(p.pos)
- fmt.Printf("%5d:%3d: ", pos.Line, pos.Column)
- i := 2 * p.indent
- for ; i > n; i -= n {
- fmt.Print(dots)
- }
- fmt.Print(dots[0:i])
- fmt.Println(a...)
-}
-
-func trace(p *parser, msg string) *parser {
- p.printTrace(msg, "(")
- p.indent++
- return p
-}
-
-// Usage pattern: defer un(trace(p, "..."));
-func un(p *parser) {
- p.indent--
- p.printTrace(")")
-}
-
-// Advance to the next token.
-func (p *parser) next0() {
- // Because of one-token look-ahead, print the previous token
- // when tracing as it provides a more readable output. The
- // very first token (!p.pos.IsValid()) is not initialized
- // (it is token.ILLEGAL), so don't print it.
- if p.trace && p.pos.IsValid() {
- s := p.tok.String()
- switch {
- case p.tok.IsLiteral():
- p.printTrace(s, p.lit)
- case p.tok.IsOperator(), p.tok.IsKeyword():
- p.printTrace("\"" + s + "\"")
- default:
- p.printTrace(s)
- }
- }
-
- p.pos, p.tok, p.lit = p.scanner.Scan()
-}
-
-// Consume a comment and return it and the line on which it ends.
-func (p *parser) consumeComment() (comment *ast.Comment, endline int) {
- // /*-style comments may end on a different line than where they start.
- // Scan the comment for '\n' chars and adjust endline accordingly.
- endline = p.file.Line(p.pos)
- if p.lit[1] == '*' {
- // don't use range here - no need to decode Unicode code points
- for i := 0; i < len(p.lit); i++ {
- if p.lit[i] == '\n' {
- endline++
- }
- }
- }
-
- comment = &ast.Comment{p.pos, p.lit}
- p.next0()
-
- return
-}
-
-// Consume a group of adjacent comments, add it to the parser's
-// comments list, and return it together with the line at which
-// the last comment in the group ends. An empty line or non-comment
-// token terminates a comment group.
-//
-func (p *parser) consumeCommentGroup() (comments *ast.CommentGroup, endline int) {
- var list []*ast.Comment
- endline = p.file.Line(p.pos)
- for p.tok == token.COMMENT && endline+1 >= p.file.Line(p.pos) {
- var comment *ast.Comment
- comment, endline = p.consumeComment()
- list = append(list, comment)
- }
-
- // add comment group to the comments list
- comments = &ast.CommentGroup{list}
- p.comments = append(p.comments, comments)
-
- return
-}
-
-// Advance to the next non-comment token. In the process, collect
- // any comment groups encountered, and remember the last lead
- // and line comments.
-//
-// A lead comment is a comment group that starts and ends in a
-// line without any other tokens and that is followed by a non-comment
-// token on the line immediately after the comment group.
-//
-// A line comment is a comment group that follows a non-comment
-// token on the same line, and that has no tokens after it on the line
-// where it ends.
-//
-// Lead and line comments may be considered documentation that is
-// stored in the AST.
-//
-func (p *parser) next() {
- p.leadComment = nil
- p.lineComment = nil
- line := p.file.Line(p.pos) // current line
- p.next0()
-
- if p.tok == token.COMMENT {
- var comment *ast.CommentGroup
- var endline int
-
- if p.file.Line(p.pos) == line {
- // The comment is on same line as the previous token; it
- // cannot be a lead comment but may be a line comment.
- comment, endline = p.consumeCommentGroup()
- if p.file.Line(p.pos) != endline {
- // The next token is on a different line, thus
- // the last comment group is a line comment.
- p.lineComment = comment
- }
- }
-
- // consume successor comments, if any
- endline = -1
- for p.tok == token.COMMENT {
- comment, endline = p.consumeCommentGroup()
- }
-
- if endline+1 == p.file.Line(p.pos) {
- // The next token is following on the line immediately after the
- // comment group, thus the last comment group is a lead comment.
- p.leadComment = comment
- }
- }
-}
-
-func (p *parser) error(pos token.Pos, msg string) {
- p.Error(p.file.Position(pos), msg)
-}
-
-func (p *parser) errorExpected(pos token.Pos, msg string) {
- msg = "expected " + msg
- if pos == p.pos {
- // the error happened at the current position;
- // make the error message more specific
- if p.tok == token.SEMICOLON && p.lit[0] == '\n' {
- msg += ", found newline"
- } else {
- msg += ", found '" + p.tok.String() + "'"
- if p.tok.IsLiteral() {
- msg += " " + p.lit
- }
- }
- }
- p.error(pos, msg)
-}
-
-func (p *parser) expect(tok token.Token) token.Pos {
- pos := p.pos
- if p.tok != tok {
- p.errorExpected(pos, "'"+tok.String()+"'")
- }
- p.next() // make progress
- return pos
-}
-
-func (p *parser) expectSemi() {
- if p.tok != token.RPAREN && p.tok != token.RBRACE {
- p.expect(token.SEMICOLON)
- }
-}
-
-func assert(cond bool, msg string) {
- if !cond {
- panic("go/parser internal error: " + msg)
- }
-}
-
-// ----------------------------------------------------------------------------
-// Identifiers
-
-func (p *parser) parseIdent() *ast.Ident {
- pos := p.pos
- name := "_"
- if p.tok == token.IDENT {
- name = p.lit
- p.next()
- } else {
- p.expect(token.IDENT) // use expect() error handling
- }
- return &ast.Ident{pos, name, nil}
-}
-
-func (p *parser) parseIdentList() (list []*ast.Ident) {
- if p.trace {
- defer un(trace(p, "IdentList"))
- }
-
- list = append(list, p.parseIdent())
- for p.tok == token.COMMA {
- p.next()
- list = append(list, p.parseIdent())
- }
-
- return
-}
-
-// ----------------------------------------------------------------------------
-// Common productions
-
-// If lhs is set, result list elements which are identifiers are not resolved.
-func (p *parser) parseExprList(lhs bool) (list []ast.Expr) {
- if p.trace {
- defer un(trace(p, "ExpressionList"))
- }
-
- list = append(list, p.parseExpr(lhs))
- for p.tok == token.COMMA {
- p.next()
- list = append(list, p.parseExpr(lhs))
- }
-
- return
-}
-
-func (p *parser) parseLhsList() []ast.Expr {
- list := p.parseExprList(true)
- switch p.tok {
- case token.DEFINE:
- // lhs of a short variable declaration
- p.shortVarDecl(p.makeIdentList(list))
- case token.COLON:
- // lhs of a label declaration or a communication clause of a select
- // statement (parseLhsList is not called when parsing the case clause
- // of a switch statement):
- // - labels are declared by the caller of parseLhsList
- // - for communication clauses, if there is a stand-alone identifier
- // followed by a colon, we have a syntax error; there is no need
- // to resolve the identifier in that case
- default:
- // identifiers must be declared elsewhere
- for _, x := range list {
- p.resolve(x)
- }
- }
- return list
-}
-
-func (p *parser) parseRhsList() []ast.Expr {
- return p.parseExprList(false)
-}
-
-// ----------------------------------------------------------------------------
-// Types
-
-func (p *parser) parseType() ast.Expr {
- if p.trace {
- defer un(trace(p, "Type"))
- }
-
- typ := p.tryType()
-
- if typ == nil {
- pos := p.pos
- p.errorExpected(pos, "type")
- p.next() // make progress
- return &ast.BadExpr{pos, p.pos}
- }
-
- return typ
-}
-
-// If the result is an identifier, it is not resolved.
-func (p *parser) parseTypeName() ast.Expr {
- if p.trace {
- defer un(trace(p, "TypeName"))
- }
-
- ident := p.parseIdent()
- // don't resolve ident yet - it may be a parameter or field name
-
- if p.tok == token.PERIOD {
- // ident is a package name
- p.next()
- p.resolve(ident)
- sel := p.parseIdent()
- return &ast.SelectorExpr{ident, sel}
- }
-
- return ident
-}
-
-func (p *parser) parseArrayType(ellipsisOk bool) ast.Expr {
- if p.trace {
- defer un(trace(p, "ArrayType"))
- }
-
- lbrack := p.expect(token.LBRACK)
- var len ast.Expr
- if ellipsisOk && p.tok == token.ELLIPSIS {
- len = &ast.Ellipsis{p.pos, nil}
- p.next()
- } else if p.tok != token.RBRACK {
- len = p.parseRhs()
- }
- p.expect(token.RBRACK)
- elt := p.parseType()
-
- return &ast.ArrayType{lbrack, len, elt}
-}
-
-func (p *parser) makeIdentList(list []ast.Expr) []*ast.Ident {
- idents := make([]*ast.Ident, len(list))
- for i, x := range list {
- ident, isIdent := x.(*ast.Ident)
- if !isIdent {
- pos := x.(ast.Expr).Pos()
- p.errorExpected(pos, "identifier")
- ident = &ast.Ident{pos, "_", nil}
- }
- idents[i] = ident
- }
- return idents
-}
-
-func (p *parser) parseFieldDecl(scope *ast.Scope) *ast.Field {
- if p.trace {
- defer un(trace(p, "FieldDecl"))
- }
-
- doc := p.leadComment
-
- // fields
- list, typ := p.parseVarList(false)
-
- // optional tag
- var tag *ast.BasicLit
- if p.tok == token.STRING {
- tag = &ast.BasicLit{p.pos, p.tok, p.lit}
- p.next()
- }
-
- // analyze case
- var idents []*ast.Ident
- if typ != nil {
- // IdentifierList Type
- idents = p.makeIdentList(list)
- } else {
- // ["*"] TypeName (AnonymousField)
- typ = list[0] // we always have at least one element
- p.resolve(typ)
- if n := len(list); n > 1 || !isTypeName(deref(typ)) {
- pos := typ.Pos()
- p.errorExpected(pos, "anonymous field")
- typ = &ast.BadExpr{pos, list[n-1].End()}
- }
- }
-
- p.expectSemi() // call before accessing p.lineComment
-
- field := &ast.Field{doc, idents, typ, tag, p.lineComment}
- p.declare(field, scope, ast.Var, idents...)
-
- return field
-}
-
-func (p *parser) parseStructType() *ast.StructType {
- if p.trace {
- defer un(trace(p, "StructType"))
- }
-
- pos := p.expect(token.STRUCT)
- lbrace := p.expect(token.LBRACE)
- scope := ast.NewScope(nil) // struct scope
- var list []*ast.Field
- for p.tok == token.IDENT || p.tok == token.MUL || p.tok == token.LPAREN {
- // a field declaration cannot start with a '(' but we accept
- // it here for more robust parsing and better error messages
- // (parseFieldDecl will check and complain if necessary)
- list = append(list, p.parseFieldDecl(scope))
- }
- rbrace := p.expect(token.RBRACE)
-
- // TODO(gri): store struct scope in AST
- return &ast.StructType{pos, &ast.FieldList{lbrace, list, rbrace}, false}
-}
-
-func (p *parser) parsePointerType() *ast.StarExpr {
- if p.trace {
- defer un(trace(p, "PointerType"))
- }
-
- star := p.expect(token.MUL)
- base := p.parseType()
-
- return &ast.StarExpr{star, base}
-}
-
-func (p *parser) tryVarType(isParam bool) ast.Expr {
- if isParam && p.tok == token.ELLIPSIS {
- pos := p.pos
- p.next()
- typ := p.tryIdentOrType(isParam) // don't use parseType so we can provide a better error message
- if typ == nil {
- p.error(pos, "'...' parameter is missing type")
- typ = &ast.BadExpr{pos, p.pos}
- }
- if p.tok != token.RPAREN {
- p.error(pos, "can use '...' with last parameter type only")
- }
- return &ast.Ellipsis{pos, typ}
- }
- return p.tryIdentOrType(false)
-}
-
-func (p *parser) parseVarType(isParam bool) ast.Expr {
- typ := p.tryVarType(isParam)
- if typ == nil {
- pos := p.pos
- p.errorExpected(pos, "type")
- p.next() // make progress
- typ = &ast.BadExpr{pos, p.pos}
- }
- return typ
-}
-
-func (p *parser) parseVarList(isParam bool) (list []ast.Expr, typ ast.Expr) {
- if p.trace {
- defer un(trace(p, "VarList"))
- }
-
- // a list of identifiers looks like a list of type names
- for {
- // parseVarType accepts any type (including parenthesized ones)
- // even though the syntax does not permit them here: we
- // accept them all for more robust parsing and complain
- // afterwards
- list = append(list, p.parseVarType(isParam))
- if p.tok != token.COMMA {
- break
- }
- p.next()
- }
-
- // if we had a list of identifiers, it must be followed by a type
- typ = p.tryVarType(isParam)
- if typ != nil {
- p.resolve(typ)
- }
-
- return
-}
-
-func (p *parser) parseParameterList(scope *ast.Scope, ellipsisOk bool) (params []*ast.Field) {
- if p.trace {
- defer un(trace(p, "ParameterList"))
- }
-
- list, typ := p.parseVarList(ellipsisOk)
- if typ != nil {
- // IdentifierList Type
- idents := p.makeIdentList(list)
- field := &ast.Field{nil, idents, typ, nil, nil}
- params = append(params, field)
- // Go spec: The scope of an identifier denoting a function
- // parameter or result variable is the function body.
- p.declare(field, scope, ast.Var, idents...)
- if p.tok == token.COMMA {
- p.next()
- }
-
- for p.tok != token.RPAREN && p.tok != token.EOF {
- idents := p.parseIdentList()
- typ := p.parseVarType(ellipsisOk)
- field := &ast.Field{nil, idents, typ, nil, nil}
- params = append(params, field)
- // Go spec: The scope of an identifier denoting a function
- // parameter or result variable is the function body.
- p.declare(field, scope, ast.Var, idents...)
- if p.tok != token.COMMA {
- break
- }
- p.next()
- }
-
- } else {
- // Type { "," Type } (anonymous parameters)
- params = make([]*ast.Field, len(list))
- for i, x := range list {
- p.resolve(x)
- params[i] = &ast.Field{Type: x}
- }
- }
-
- return
-}
-
-func (p *parser) parseParameters(scope *ast.Scope, ellipsisOk bool) *ast.FieldList {
- if p.trace {
- defer un(trace(p, "Parameters"))
- }
-
- var params []*ast.Field
- lparen := p.expect(token.LPAREN)
- if p.tok != token.RPAREN {
- params = p.parseParameterList(scope, ellipsisOk)
- }
- rparen := p.expect(token.RPAREN)
-
- return &ast.FieldList{lparen, params, rparen}
-}
-
-func (p *parser) parseResult(scope *ast.Scope) *ast.FieldList {
- if p.trace {
- defer un(trace(p, "Result"))
- }
-
- if p.tok == token.LPAREN {
- return p.parseParameters(scope, false)
- }
-
- typ := p.tryType()
- if typ != nil {
- list := make([]*ast.Field, 1)
- list[0] = &ast.Field{Type: typ}
- return &ast.FieldList{List: list}
- }
-
- return nil
-}
-
-func (p *parser) parseSignature(scope *ast.Scope) (params, results *ast.FieldList) {
- if p.trace {
- defer un(trace(p, "Signature"))
- }
-
- params = p.parseParameters(scope, true)
- results = p.parseResult(scope)
-
- return
-}
-
-func (p *parser) parseFuncType() (*ast.FuncType, *ast.Scope) {
- if p.trace {
- defer un(trace(p, "FuncType"))
- }
-
- pos := p.expect(token.FUNC)
- scope := ast.NewScope(p.topScope) // function scope
- params, results := p.parseSignature(scope)
-
- return &ast.FuncType{pos, params, results}, scope
-}
-
-func (p *parser) parseMethodSpec(scope *ast.Scope) *ast.Field {
- if p.trace {
- defer un(trace(p, "MethodSpec"))
- }
-
- doc := p.leadComment
- var idents []*ast.Ident
- var typ ast.Expr
- x := p.parseTypeName()
- if ident, isIdent := x.(*ast.Ident); isIdent && p.tok == token.LPAREN {
- // method
- idents = []*ast.Ident{ident}
- scope := ast.NewScope(nil) // method scope
- params, results := p.parseSignature(scope)
- typ = &ast.FuncType{token.NoPos, params, results}
- } else {
- // embedded interface
- typ = x
- }
- p.expectSemi() // call before accessing p.lineComment
-
- spec := &ast.Field{doc, idents, typ, nil, p.lineComment}
- p.declare(spec, scope, ast.Fun, idents...)
-
- return spec
-}
-
-func (p *parser) parseInterfaceType() *ast.InterfaceType {
- if p.trace {
- defer un(trace(p, "InterfaceType"))
- }
-
- pos := p.expect(token.INTERFACE)
- lbrace := p.expect(token.LBRACE)
- scope := ast.NewScope(nil) // interface scope
- var list []*ast.Field
- for p.tok == token.IDENT {
- list = append(list, p.parseMethodSpec(scope))
- }
- rbrace := p.expect(token.RBRACE)
-
- // TODO(gri): store interface scope in AST
- return &ast.InterfaceType{pos, &ast.FieldList{lbrace, list, rbrace}, false}
-}
-
-func (p *parser) parseMapType() *ast.MapType {
- if p.trace {
- defer un(trace(p, "MapType"))
- }
-
- pos := p.expect(token.MAP)
- p.expect(token.LBRACK)
- key := p.parseType()
- p.expect(token.RBRACK)
- value := p.parseType()
-
- return &ast.MapType{pos, key, value}
-}
-
-func (p *parser) parseChanType() *ast.ChanType {
- if p.trace {
- defer un(trace(p, "ChanType"))
- }
-
- pos := p.pos
- dir := ast.SEND | ast.RECV
- if p.tok == token.CHAN {
- p.next()
- if p.tok == token.ARROW {
- p.next()
- dir = ast.SEND
- }
- } else {
- p.expect(token.ARROW)
- p.expect(token.CHAN)
- dir = ast.RECV
- }
- value := p.parseType()
-
- return &ast.ChanType{pos, dir, value}
-}
-
-// If the result is an identifier, it is not resolved.
-func (p *parser) tryIdentOrType(ellipsisOk bool) ast.Expr {
- switch p.tok {
- case token.IDENT:
- return p.parseTypeName()
- case token.LBRACK:
- return p.parseArrayType(ellipsisOk)
- case token.STRUCT:
- return p.parseStructType()
- case token.MUL:
- return p.parsePointerType()
- case token.FUNC:
- typ, _ := p.parseFuncType()
- return typ
- case token.INTERFACE:
- return p.parseInterfaceType()
- case token.MAP:
- return p.parseMapType()
- case token.CHAN, token.ARROW:
- return p.parseChanType()
- case token.LPAREN:
- lparen := p.pos
- p.next()
- typ := p.parseType()
- rparen := p.expect(token.RPAREN)
- return &ast.ParenExpr{lparen, typ, rparen}
- }
-
- // no type found
- return nil
-}
-
-func (p *parser) tryType() ast.Expr {
- typ := p.tryIdentOrType(false)
- if typ != nil {
- p.resolve(typ)
- }
- return typ
-}
-
-// ----------------------------------------------------------------------------
-// Blocks
-
-func (p *parser) parseStmtList() (list []ast.Stmt) {
- if p.trace {
- defer un(trace(p, "StatementList"))
- }
-
- for p.tok != token.CASE && p.tok != token.DEFAULT && p.tok != token.RBRACE && p.tok != token.EOF {
- list = append(list, p.parseStmt())
- }
-
- return
-}
-
-func (p *parser) parseBody(scope *ast.Scope) *ast.BlockStmt {
- if p.trace {
- defer un(trace(p, "Body"))
- }
-
- lbrace := p.expect(token.LBRACE)
- p.topScope = scope // open function scope
- p.openLabelScope()
- list := p.parseStmtList()
- p.closeLabelScope()
- p.closeScope()
- rbrace := p.expect(token.RBRACE)
-
- return &ast.BlockStmt{lbrace, list, rbrace}
-}
-
-func (p *parser) parseBlockStmt() *ast.BlockStmt {
- if p.trace {
- defer un(trace(p, "BlockStmt"))
- }
-
- lbrace := p.expect(token.LBRACE)
- p.openScope()
- list := p.parseStmtList()
- p.closeScope()
- rbrace := p.expect(token.RBRACE)
-
- return &ast.BlockStmt{lbrace, list, rbrace}
-}
-
-// ----------------------------------------------------------------------------
-// Expressions
-
-func (p *parser) parseFuncTypeOrLit() ast.Expr {
- if p.trace {
- defer un(trace(p, "FuncTypeOrLit"))
- }
-
- typ, scope := p.parseFuncType()
- if p.tok != token.LBRACE {
- // function type only
- return typ
- }
-
- p.exprLev++
- body := p.parseBody(scope)
- p.exprLev--
-
- return &ast.FuncLit{typ, body}
-}
-
-// parseOperand may return an expression or a raw type (incl. array
- // types of the form [...]T). Callers must verify the result.
-// If lhs is set and the result is an identifier, it is not resolved.
-//
-func (p *parser) parseOperand(lhs bool) ast.Expr {
- if p.trace {
- defer un(trace(p, "Operand"))
- }
-
- switch p.tok {
- case token.IDENT:
- x := p.parseIdent()
- if !lhs {
- p.resolve(x)
- }
- return x
-
- case token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING:
- x := &ast.BasicLit{p.pos, p.tok, p.lit}
- p.next()
- return x
-
- case token.LPAREN:
- lparen := p.pos
- p.next()
- p.exprLev++
- x := p.parseRhs()
- p.exprLev--
- rparen := p.expect(token.RPAREN)
- return &ast.ParenExpr{lparen, x, rparen}
-
- case token.FUNC:
- return p.parseFuncTypeOrLit()
-
- default:
- if typ := p.tryIdentOrType(true); typ != nil {
- // could be type for composite literal or conversion
- _, isIdent := typ.(*ast.Ident)
- assert(!isIdent, "type cannot be identifier")
- return typ
- }
- }
-
- pos := p.pos
- p.errorExpected(pos, "operand")
- p.next() // make progress
- return &ast.BadExpr{pos, p.pos}
-}
-
-func (p *parser) parseSelector(x ast.Expr) ast.Expr {
- if p.trace {
- defer un(trace(p, "Selector"))
- }
-
- sel := p.parseIdent()
-
- return &ast.SelectorExpr{x, sel}
-}
-
-func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr {
- if p.trace {
- defer un(trace(p, "TypeAssertion"))
- }
-
- p.expect(token.LPAREN)
- var typ ast.Expr
- if p.tok == token.TYPE {
- // type switch: typ == nil
- p.next()
- } else {
- typ = p.parseType()
- }
- p.expect(token.RPAREN)
-
- return &ast.TypeAssertExpr{x, typ}
-}
-
-func (p *parser) parseIndexOrSlice(x ast.Expr) ast.Expr {
- if p.trace {
- defer un(trace(p, "IndexOrSlice"))
- }
-
- lbrack := p.expect(token.LBRACK)
- p.exprLev++
- var low, high ast.Expr
- isSlice := false
- if p.tok != token.COLON {
- low = p.parseRhs()
- }
- if p.tok == token.COLON {
- isSlice = true
- p.next()
- if p.tok != token.RBRACK {
- high = p.parseRhs()
- }
- }
- p.exprLev--
- rbrack := p.expect(token.RBRACK)
-
- if isSlice {
- return &ast.SliceExpr{x, lbrack, low, high, rbrack}
- }
- return &ast.IndexExpr{x, lbrack, low, rbrack}
-}
-
-func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr {
- if p.trace {
- defer un(trace(p, "CallOrConversion"))
- }
-
- lparen := p.expect(token.LPAREN)
- p.exprLev++
- var list []ast.Expr
- var ellipsis token.Pos
- for p.tok != token.RPAREN && p.tok != token.EOF && !ellipsis.IsValid() {
- list = append(list, p.parseRhs())
- if p.tok == token.ELLIPSIS {
- ellipsis = p.pos
- p.next()
- }
- if p.tok != token.COMMA {
- break
- }
- p.next()
- }
- p.exprLev--
- rparen := p.expect(token.RPAREN)
-
- return &ast.CallExpr{fun, lparen, list, ellipsis, rparen}
-}
-
-func (p *parser) parseElement(keyOk bool) ast.Expr {
- if p.trace {
- defer un(trace(p, "Element"))
- }
-
- if p.tok == token.LBRACE {
- return p.parseLiteralValue(nil)
- }
-
- x := p.parseExpr(keyOk) // don't resolve if map key
- if keyOk {
- if p.tok == token.COLON {
- colon := p.pos
- p.next()
- return &ast.KeyValueExpr{x, colon, p.parseElement(false)}
- }
- p.resolve(x) // not a map key
- }
-
- return x
-}
-
-func (p *parser) parseElementList() (list []ast.Expr) {
- if p.trace {
- defer un(trace(p, "ElementList"))
- }
-
- for p.tok != token.RBRACE && p.tok != token.EOF {
- list = append(list, p.parseElement(true))
- if p.tok != token.COMMA {
- break
- }
- p.next()
- }
-
- return
-}
-
-func (p *parser) parseLiteralValue(typ ast.Expr) ast.Expr {
- if p.trace {
- defer un(trace(p, "LiteralValue"))
- }
-
- lbrace := p.expect(token.LBRACE)
- var elts []ast.Expr
- p.exprLev++
- if p.tok != token.RBRACE {
- elts = p.parseElementList()
- }
- p.exprLev--
- rbrace := p.expect(token.RBRACE)
- return &ast.CompositeLit{typ, lbrace, elts, rbrace}
-}
-
-// checkExpr checks that x is an expression (and not a type).
-func (p *parser) checkExpr(x ast.Expr) ast.Expr {
- switch t := unparen(x).(type) {
- case *ast.BadExpr:
- case *ast.Ident:
- case *ast.BasicLit:
- case *ast.FuncLit:
- case *ast.CompositeLit:
- case *ast.ParenExpr:
- panic("unreachable")
- case *ast.SelectorExpr:
- case *ast.IndexExpr:
- case *ast.SliceExpr:
- case *ast.TypeAssertExpr:
- if t.Type == nil {
- // the form X.(type) is only allowed in type switch expressions
- p.errorExpected(x.Pos(), "expression")
- x = &ast.BadExpr{x.Pos(), x.End()}
- }
- case *ast.CallExpr:
- case *ast.StarExpr:
- case *ast.UnaryExpr:
- if t.Op == token.RANGE {
- // the range operator is only allowed at the top of a for statement
- p.errorExpected(x.Pos(), "expression")
- x = &ast.BadExpr{x.Pos(), x.End()}
- }
- case *ast.BinaryExpr:
- default:
- // all other nodes are not proper expressions
- p.errorExpected(x.Pos(), "expression")
- x = &ast.BadExpr{x.Pos(), x.End()}
- }
- return x
-}
-
-// isTypeName returns true iff x is a (qualified) TypeName.
-func isTypeName(x ast.Expr) bool {
- switch t := x.(type) {
- case *ast.BadExpr:
- case *ast.Ident:
- case *ast.SelectorExpr:
- _, isIdent := t.X.(*ast.Ident)
- return isIdent
- default:
- return false // all other nodes are not type names
- }
- return true
-}
-
-// isLiteralType returns true iff x is a legal composite literal type.
-func isLiteralType(x ast.Expr) bool {
- switch t := x.(type) {
- case *ast.BadExpr:
- case *ast.Ident:
- case *ast.SelectorExpr:
- _, isIdent := t.X.(*ast.Ident)
- return isIdent
- case *ast.ArrayType:
- case *ast.StructType:
- case *ast.MapType:
- default:
- return false // all other nodes are not legal composite literal types
- }
- return true
-}
-
-// If x is of the form *T, deref returns T, otherwise it returns x.
-func deref(x ast.Expr) ast.Expr {
- if p, isPtr := x.(*ast.StarExpr); isPtr {
- x = p.X
- }
- return x
-}
-
-// If x is of the form (T), unparen returns unparen(T), otherwise it returns x.
-func unparen(x ast.Expr) ast.Expr {
- if p, isParen := x.(*ast.ParenExpr); isParen {
- x = unparen(p.X)
- }
- return x
-}
-
-// checkExprOrType checks that x is an expression or a type
-// (and not a raw type such as [...]T).
-//
-func (p *parser) checkExprOrType(x ast.Expr) ast.Expr {
- switch t := unparen(x).(type) {
- case *ast.ParenExpr:
- panic("unreachable")
- case *ast.UnaryExpr:
- if t.Op == token.RANGE {
- // the range operator is only allowed at the top of a for statement
- p.errorExpected(x.Pos(), "expression")
- x = &ast.BadExpr{x.Pos(), x.End()}
- }
- case *ast.ArrayType:
- if len, isEllipsis := t.Len.(*ast.Ellipsis); isEllipsis {
- p.error(len.Pos(), "expected array length, found '...'")
- x = &ast.BadExpr{x.Pos(), x.End()}
- }
- }
-
- // all other nodes are expressions or types
- return x
-}
-
-// If lhs is set and the result is an identifier, it is not resolved.
-func (p *parser) parsePrimaryExpr(lhs bool) ast.Expr {
- if p.trace {
- defer un(trace(p, "PrimaryExpr"))
- }
-
- x := p.parseOperand(lhs)
-L:
- for {
- switch p.tok {
- case token.PERIOD:
- p.next()
- if lhs {
- p.resolve(x)
- }
- switch p.tok {
- case token.IDENT:
- x = p.parseSelector(p.checkExpr(x))
- case token.LPAREN:
- x = p.parseTypeAssertion(p.checkExpr(x))
- default:
- pos := p.pos
- p.next() // make progress
- p.errorExpected(pos, "selector or type assertion")
- x = &ast.BadExpr{pos, p.pos}
- }
- case token.LBRACK:
- if lhs {
- p.resolve(x)
- }
- x = p.parseIndexOrSlice(p.checkExpr(x))
- case token.LPAREN:
- if lhs {
- p.resolve(x)
- }
- x = p.parseCallOrConversion(p.checkExprOrType(x))
- case token.LBRACE:
- if isLiteralType(x) && (p.exprLev >= 0 || !isTypeName(x)) {
- if lhs {
- p.resolve(x)
- }
- x = p.parseLiteralValue(x)
- } else {
- break L
- }
- default:
- break L
- }
- lhs = false // no need to try to resolve again
- }
-
- return x
-}
-
-// If lhs is set and the result is an identifier, it is not resolved.
-func (p *parser) parseUnaryExpr(lhs bool) ast.Expr {
- if p.trace {
- defer un(trace(p, "UnaryExpr"))
- }
-
- switch p.tok {
- case token.ADD, token.SUB, token.NOT, token.XOR, token.AND, token.RANGE:
- pos, op := p.pos, p.tok
- p.next()
- x := p.parseUnaryExpr(false)
- return &ast.UnaryExpr{pos, op, p.checkExpr(x)}
-
- case token.ARROW:
- // channel type or receive expression
- pos := p.pos
- p.next()
- if p.tok == token.CHAN {
- p.next()
- value := p.parseType()
- return &ast.ChanType{pos, ast.RECV, value}
- }
-
- x := p.parseUnaryExpr(false)
- return &ast.UnaryExpr{pos, token.ARROW, p.checkExpr(x)}
-
- case token.MUL:
- // pointer type or unary "*" expression
- pos := p.pos
- p.next()
- x := p.parseUnaryExpr(false)
- return &ast.StarExpr{pos, p.checkExprOrType(x)}
- }
-
- return p.parsePrimaryExpr(lhs)
-}
-
-// If lhs is set and the result is an identifier, it is not resolved.
-func (p *parser) parseBinaryExpr(lhs bool, prec1 int) ast.Expr {
- if p.trace {
- defer un(trace(p, "BinaryExpr"))
- }
-
- x := p.parseUnaryExpr(lhs)
- for prec := p.tok.Precedence(); prec >= prec1; prec-- {
- for p.tok.Precedence() == prec {
- pos, op := p.pos, p.tok
- p.next()
- if lhs {
- p.resolve(x)
- lhs = false
- }
- y := p.parseBinaryExpr(false, prec+1)
- x = &ast.BinaryExpr{p.checkExpr(x), pos, op, p.checkExpr(y)}
- }
- }
-
- return x
-}
-
-// If lhs is set and the result is an identifier, it is not resolved.
-// TODO(gri): parseExpr may return a type or even a raw type ([..]int) -
-// should reject when a type/raw type is obviously not allowed
-func (p *parser) parseExpr(lhs bool) ast.Expr {
- if p.trace {
- defer un(trace(p, "Expression"))
- }
-
- return p.parseBinaryExpr(lhs, token.LowestPrec+1)
-}
-
-func (p *parser) parseRhs() ast.Expr {
- return p.parseExpr(false)
-}
-
-// ----------------------------------------------------------------------------
-// Statements
-
-func (p *parser) parseSimpleStmt(labelOk bool) ast.Stmt {
- if p.trace {
- defer un(trace(p, "SimpleStmt"))
- }
-
- x := p.parseLhsList()
-
- switch p.tok {
- case
- token.DEFINE, token.ASSIGN, token.ADD_ASSIGN,
- token.SUB_ASSIGN, token.MUL_ASSIGN, token.QUO_ASSIGN,
- token.REM_ASSIGN, token.AND_ASSIGN, token.OR_ASSIGN,
- token.XOR_ASSIGN, token.SHL_ASSIGN, token.SHR_ASSIGN, token.AND_NOT_ASSIGN:
- // assignment statement
- pos, tok := p.pos, p.tok
- p.next()
- y := p.parseRhsList()
- return &ast.AssignStmt{x, pos, tok, y}
- }
-
- if len(x) > 1 {
- p.errorExpected(x[0].Pos(), "1 expression")
- // continue with first expression
- }
-
- switch p.tok {
- case token.COLON:
- // labeled statement
- colon := p.pos
- p.next()
- if label, isIdent := x[0].(*ast.Ident); labelOk && isIdent {
- // Go spec: The scope of a label is the body of the function
- // in which it is declared and excludes the body of any nested
- // function.
- stmt := &ast.LabeledStmt{label, colon, p.parseStmt()}
- p.declare(stmt, p.labelScope, ast.Lbl, label)
- return stmt
- }
- p.error(x[0].Pos(), "illegal label declaration")
- return &ast.BadStmt{x[0].Pos(), colon + 1}
-
- case token.ARROW:
- // send statement
- arrow := p.pos
- p.next() // consume "<-"
- y := p.parseRhs()
- return &ast.SendStmt{x[0], arrow, y}
-
- case token.INC, token.DEC:
- // increment or decrement
- s := &ast.IncDecStmt{x[0], p.pos, p.tok}
- p.next() // consume "++" or "--"
- return s
- }
-
- // expression
- return &ast.ExprStmt{x[0]}
-}
-
-func (p *parser) parseCallExpr() *ast.CallExpr {
- x := p.parseRhs()
- if call, isCall := x.(*ast.CallExpr); isCall {
- return call
- }
- p.errorExpected(x.Pos(), "function/method call")
- return nil
-}
-
-func (p *parser) parseGoStmt() ast.Stmt {
- if p.trace {
- defer un(trace(p, "GoStmt"))
- }
-
- pos := p.expect(token.GO)
- call := p.parseCallExpr()
- p.expectSemi()
- if call == nil {
- return &ast.BadStmt{pos, pos + 2} // len("go")
- }
-
- return &ast.GoStmt{pos, call}
-}
-
-func (p *parser) parseDeferStmt() ast.Stmt {
- if p.trace {
- defer un(trace(p, "DeferStmt"))
- }
-
- pos := p.expect(token.DEFER)
- call := p.parseCallExpr()
- p.expectSemi()
- if call == nil {
- return &ast.BadStmt{pos, pos + 5} // len("defer")
- }
-
- return &ast.DeferStmt{pos, call}
-}
-
-func (p *parser) parseReturnStmt() *ast.ReturnStmt {
- if p.trace {
- defer un(trace(p, "ReturnStmt"))
- }
-
- pos := p.pos
- p.expect(token.RETURN)
- var x []ast.Expr
- if p.tok != token.SEMICOLON && p.tok != token.RBRACE {
- x = p.parseRhsList()
- }
- p.expectSemi()
-
- return &ast.ReturnStmt{pos, x}
-}
-
-func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt {
- if p.trace {
- defer un(trace(p, "BranchStmt"))
- }
-
- pos := p.expect(tok)
- var label *ast.Ident
- if tok != token.FALLTHROUGH && p.tok == token.IDENT {
- label = p.parseIdent()
- // add to list of unresolved targets
- n := len(p.targetStack) - 1
- p.targetStack[n] = append(p.targetStack[n], label)
- }
- p.expectSemi()
-
- return &ast.BranchStmt{pos, tok, label}
-}
-
-func (p *parser) makeExpr(s ast.Stmt) ast.Expr {
- if s == nil {
- return nil
- }
- if es, isExpr := s.(*ast.ExprStmt); isExpr {
- return p.checkExpr(es.X)
- }
- p.error(s.Pos(), "expected condition, found simple statement")
- return &ast.BadExpr{s.Pos(), s.End()}
-}
-
-func (p *parser) parseIfStmt() *ast.IfStmt {
- if p.trace {
- defer un(trace(p, "IfStmt"))
- }
-
- pos := p.expect(token.IF)
- p.openScope()
- defer p.closeScope()
-
- var s ast.Stmt
- var x ast.Expr
- {
- prevLev := p.exprLev
- p.exprLev = -1
- if p.tok == token.SEMICOLON {
- p.next()
- x = p.parseRhs()
- } else {
- s = p.parseSimpleStmt(false)
- if p.tok == token.SEMICOLON {
- p.next()
- x = p.parseRhs()
- } else {
- x = p.makeExpr(s)
- s = nil
- }
- }
- p.exprLev = prevLev
- }
-
- body := p.parseBlockStmt()
- var else_ ast.Stmt
- if p.tok == token.ELSE {
- p.next()
- else_ = p.parseStmt()
- } else {
- p.expectSemi()
- }
-
- return &ast.IfStmt{pos, s, x, body, else_}
-}
-
-func (p *parser) parseTypeList() (list []ast.Expr) {
- if p.trace {
- defer un(trace(p, "TypeList"))
- }
-
- list = append(list, p.parseType())
- for p.tok == token.COMMA {
- p.next()
- list = append(list, p.parseType())
- }
-
- return
-}
-
-func (p *parser) parseCaseClause(exprSwitch bool) *ast.CaseClause {
- if p.trace {
- defer un(trace(p, "CaseClause"))
- }
-
- pos := p.pos
- var list []ast.Expr
- if p.tok == token.CASE {
- p.next()
- if exprSwitch {
- list = p.parseRhsList()
- } else {
- list = p.parseTypeList()
- }
- } else {
- p.expect(token.DEFAULT)
- }
-
- colon := p.expect(token.COLON)
- p.openScope()
- body := p.parseStmtList()
- p.closeScope()
-
- return &ast.CaseClause{pos, list, colon, body}
-}
-
-func isExprSwitch(s ast.Stmt) bool {
- if s == nil {
- return true
- }
- if e, ok := s.(*ast.ExprStmt); ok {
- if a, ok := e.X.(*ast.TypeAssertExpr); ok {
- return a.Type != nil // regular type assertion
- }
- return true
- }
- return false
-}
-
-func (p *parser) parseSwitchStmt() ast.Stmt {
- if p.trace {
- defer un(trace(p, "SwitchStmt"))
- }
-
- pos := p.expect(token.SWITCH)
- p.openScope()
- defer p.closeScope()
-
- var s1, s2 ast.Stmt
- if p.tok != token.LBRACE {
- prevLev := p.exprLev
- p.exprLev = -1
- if p.tok != token.SEMICOLON {
- s2 = p.parseSimpleStmt(false)
- }
- if p.tok == token.SEMICOLON {
- p.next()
- s1 = s2
- s2 = nil
- if p.tok != token.LBRACE {
- s2 = p.parseSimpleStmt(false)
- }
- }
- p.exprLev = prevLev
- }
-
- exprSwitch := isExprSwitch(s2)
- lbrace := p.expect(token.LBRACE)
- var list []ast.Stmt
- for p.tok == token.CASE || p.tok == token.DEFAULT {
- list = append(list, p.parseCaseClause(exprSwitch))
- }
- rbrace := p.expect(token.RBRACE)
- p.expectSemi()
- body := &ast.BlockStmt{lbrace, list, rbrace}
-
- if exprSwitch {
- return &ast.SwitchStmt{pos, s1, p.makeExpr(s2), body}
- }
- // type switch
- // TODO(gri): do all the checks!
- return &ast.TypeSwitchStmt{pos, s1, s2, body}
-}
-
-func (p *parser) parseCommClause() *ast.CommClause {
- if p.trace {
- defer un(trace(p, "CommClause"))
- }
-
- p.openScope()
- pos := p.pos
- var comm ast.Stmt
- if p.tok == token.CASE {
- p.next()
- lhs := p.parseLhsList()
- if p.tok == token.ARROW {
- // SendStmt
- if len(lhs) > 1 {
- p.errorExpected(lhs[0].Pos(), "1 expression")
- // continue with first expression
- }
- arrow := p.pos
- p.next()
- rhs := p.parseRhs()
- comm = &ast.SendStmt{lhs[0], arrow, rhs}
- } else {
- // RecvStmt
- pos := p.pos
- tok := p.tok
- var rhs ast.Expr
- if tok == token.ASSIGN || tok == token.DEFINE {
- // RecvStmt with assignment
- if len(lhs) > 2 {
- p.errorExpected(lhs[0].Pos(), "1 or 2 expressions")
- // continue with first two expressions
- lhs = lhs[0:2]
- }
- p.next()
- rhs = p.parseRhs()
- } else {
- // rhs must be single receive operation
- if len(lhs) > 1 {
- p.errorExpected(lhs[0].Pos(), "1 expression")
- // continue with first expression
- }
- rhs = lhs[0]
- lhs = nil // there is no lhs
- }
- if x, isUnary := rhs.(*ast.UnaryExpr); !isUnary || x.Op != token.ARROW {
- p.errorExpected(rhs.Pos(), "send or receive operation")
- rhs = &ast.BadExpr{rhs.Pos(), rhs.End()}
- }
- if lhs != nil {
- comm = &ast.AssignStmt{lhs, pos, tok, []ast.Expr{rhs}}
- } else {
- comm = &ast.ExprStmt{rhs}
- }
- }
- } else {
- p.expect(token.DEFAULT)
- }
-
- colon := p.expect(token.COLON)
- body := p.parseStmtList()
- p.closeScope()
-
- return &ast.CommClause{pos, comm, colon, body}
-}
-
-func (p *parser) parseSelectStmt() *ast.SelectStmt {
- if p.trace {
- defer un(trace(p, "SelectStmt"))
- }
-
- pos := p.expect(token.SELECT)
- lbrace := p.expect(token.LBRACE)
- var list []ast.Stmt
- for p.tok == token.CASE || p.tok == token.DEFAULT {
- list = append(list, p.parseCommClause())
- }
- rbrace := p.expect(token.RBRACE)
- p.expectSemi()
- body := &ast.BlockStmt{lbrace, list, rbrace}
-
- return &ast.SelectStmt{pos, body}
-}
-
-func (p *parser) parseForStmt() ast.Stmt {
- if p.trace {
- defer un(trace(p, "ForStmt"))
- }
-
- pos := p.expect(token.FOR)
- p.openScope()
- defer p.closeScope()
-
- var s1, s2, s3 ast.Stmt
- if p.tok != token.LBRACE {
- prevLev := p.exprLev
- p.exprLev = -1
- if p.tok != token.SEMICOLON {
- s2 = p.parseSimpleStmt(false)
- }
- if p.tok == token.SEMICOLON {
- p.next()
- s1 = s2
- s2 = nil
- if p.tok != token.SEMICOLON {
- s2 = p.parseSimpleStmt(false)
- }
- p.expectSemi()
- if p.tok != token.LBRACE {
- s3 = p.parseSimpleStmt(false)
- }
- }
- p.exprLev = prevLev
- }
-
- body := p.parseBlockStmt()
- p.expectSemi()
-
- if as, isAssign := s2.(*ast.AssignStmt); isAssign {
- // possibly a for statement with a range clause; check assignment operator
- if as.Tok != token.ASSIGN && as.Tok != token.DEFINE {
- p.errorExpected(as.TokPos, "'=' or ':='")
- return &ast.BadStmt{pos, body.End()}
- }
- // check lhs
- var key, value ast.Expr
- switch len(as.Lhs) {
- case 2:
- key, value = as.Lhs[0], as.Lhs[1]
- case 1:
- key = as.Lhs[0]
- default:
- p.errorExpected(as.Lhs[0].Pos(), "1 or 2 expressions")
- return &ast.BadStmt{pos, body.End()}
- }
- // check rhs
- if len(as.Rhs) != 1 {
- p.errorExpected(as.Rhs[0].Pos(), "1 expression")
- return &ast.BadStmt{pos, body.End()}
- }
- if rhs, isUnary := as.Rhs[0].(*ast.UnaryExpr); isUnary && rhs.Op == token.RANGE {
- // rhs is range expression
- // (any short variable declaration was handled by parseSimpleStmt above)
- return &ast.RangeStmt{pos, key, value, as.TokPos, as.Tok, rhs.X, body}
- }
- p.errorExpected(s2.Pos(), "range clause")
- return &ast.BadStmt{pos, body.End()}
- }
-
- // regular for statement
- return &ast.ForStmt{pos, s1, p.makeExpr(s2), s3, body}
-}
-
-func (p *parser) parseStmt() (s ast.Stmt) {
- if p.trace {
- defer un(trace(p, "Statement"))
- }
-
- switch p.tok {
- case token.CONST, token.TYPE, token.VAR:
- s = &ast.DeclStmt{p.parseDecl()}
- case
- // tokens that may start a top-level expression
- token.IDENT, token.INT, token.FLOAT, token.CHAR, token.STRING, token.FUNC, token.LPAREN, // operand
- token.LBRACK, token.STRUCT, // composite type
- token.MUL, token.AND, token.ARROW, token.ADD, token.SUB, token.XOR: // unary operators
- s = p.parseSimpleStmt(true)
- // because of the required look-ahead, labeled statements are
- // parsed by parseSimpleStmt - don't expect a semicolon after
- // them
- if _, isLabeledStmt := s.(*ast.LabeledStmt); !isLabeledStmt {
- p.expectSemi()
- }
- case token.GO:
- s = p.parseGoStmt()
- case token.DEFER:
- s = p.parseDeferStmt()
- case token.RETURN:
- s = p.parseReturnStmt()
- case token.BREAK, token.CONTINUE, token.GOTO, token.FALLTHROUGH:
- s = p.parseBranchStmt(p.tok)
- case token.LBRACE:
- s = p.parseBlockStmt()
- p.expectSemi()
- case token.IF:
- s = p.parseIfStmt()
- case token.SWITCH:
- s = p.parseSwitchStmt()
- case token.SELECT:
- s = p.parseSelectStmt()
- case token.FOR:
- s = p.parseForStmt()
- case token.SEMICOLON:
- s = &ast.EmptyStmt{p.pos}
- p.next()
- case token.RBRACE:
- // a semicolon may be omitted before a closing "}"
- s = &ast.EmptyStmt{p.pos}
- default:
- // no statement found
- pos := p.pos
- p.errorExpected(pos, "statement")
- p.next() // make progress
- s = &ast.BadStmt{pos, p.pos}
- }
-
- return
-}
-
-// ----------------------------------------------------------------------------
-// Declarations
-
-type parseSpecFunction func(p *parser, doc *ast.CommentGroup, iota int) ast.Spec
-
-func parseImportSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
- if p.trace {
- defer un(trace(p, "ImportSpec"))
- }
-
- var ident *ast.Ident
- switch p.tok {
- case token.PERIOD:
- ident = &ast.Ident{p.pos, ".", nil}
- p.next()
- case token.IDENT:
- ident = p.parseIdent()
- }
-
- var path *ast.BasicLit
- if p.tok == token.STRING {
- path = &ast.BasicLit{p.pos, p.tok, p.lit}
- p.next()
- } else {
- p.expect(token.STRING) // use expect() error handling
- }
- p.expectSemi() // call before accessing p.lineComment
-
- // collect imports
- spec := &ast.ImportSpec{doc, ident, path, p.lineComment}
- p.imports = append(p.imports, spec)
-
- return spec
-}
-
-func parseConstSpec(p *parser, doc *ast.CommentGroup, iota int) ast.Spec {
- if p.trace {
- defer un(trace(p, "ConstSpec"))
- }
-
- idents := p.parseIdentList()
- typ := p.tryType()
- var values []ast.Expr
- if typ != nil || p.tok == token.ASSIGN || iota == 0 {
- p.expect(token.ASSIGN)
- values = p.parseRhsList()
- }
- p.expectSemi() // call before accessing p.lineComment
-
- // Go spec: The scope of a constant or variable identifier declared inside
- // a function begins at the end of the ConstSpec or VarSpec and ends at
- // the end of the innermost containing block.
- // (Global identifiers are resolved in a separate phase after parsing.)
- spec := &ast.ValueSpec{doc, idents, typ, values, p.lineComment}
- p.declare(spec, p.topScope, ast.Con, idents...)
-
- return spec
-}
-
-func parseTypeSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
- if p.trace {
- defer un(trace(p, "TypeSpec"))
- }
-
- ident := p.parseIdent()
-
- // Go spec: The scope of a type identifier declared inside a function begins
- // at the identifier in the TypeSpec and ends at the end of the innermost
- // containing block.
- // (Global identifiers are resolved in a separate phase after parsing.)
- spec := &ast.TypeSpec{doc, ident, nil, nil}
- p.declare(spec, p.topScope, ast.Typ, ident)
-
- spec.Type = p.parseType()
- p.expectSemi() // call before accessing p.lineComment
- spec.Comment = p.lineComment
-
- return spec
-}
-
-func parseVarSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
- if p.trace {
- defer un(trace(p, "VarSpec"))
- }
-
- idents := p.parseIdentList()
- typ := p.tryType()
- var values []ast.Expr
- if typ == nil || p.tok == token.ASSIGN {
- p.expect(token.ASSIGN)
- values = p.parseRhsList()
- }
- p.expectSemi() // call before accessing p.lineComment
-
- // Go spec: The scope of a constant or variable identifier declared inside
- // a function begins at the end of the ConstSpec or VarSpec and ends at
- // the end of the innermost containing block.
- // (Global identifiers are resolved in a separate phase after parsing.)
- spec := &ast.ValueSpec{doc, idents, typ, values, p.lineComment}
- p.declare(spec, p.topScope, ast.Var, idents...)
-
- return spec
-}
-
-func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
- if p.trace {
- defer un(trace(p, "GenDecl("+keyword.String()+")"))
- }
-
- doc := p.leadComment
- pos := p.expect(keyword)
- var lparen, rparen token.Pos
- var list []ast.Spec
- if p.tok == token.LPAREN {
- lparen = p.pos
- p.next()
- for iota := 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++ {
- list = append(list, f(p, p.leadComment, iota))
- }
- rparen = p.expect(token.RPAREN)
- p.expectSemi()
- } else {
- list = append(list, f(p, nil, 0))
- }
-
- return &ast.GenDecl{doc, pos, keyword, lparen, list, rparen}
-}
-
-func (p *parser) parseReceiver(scope *ast.Scope) *ast.FieldList {
- if p.trace {
- defer un(trace(p, "Receiver"))
- }
-
- pos := p.pos
- par := p.parseParameters(scope, false)
-
- // must have exactly one receiver
- if par.NumFields() != 1 {
- p.errorExpected(pos, "exactly one receiver")
- // TODO determine a better range for BadExpr below
- par.List = []*ast.Field{{Type: &ast.BadExpr{pos, pos}}}
- return par
- }
-
- // recv type must be of the form ["*"] identifier
- recv := par.List[0]
- base := deref(recv.Type)
- if _, isIdent := base.(*ast.Ident); !isIdent {
- p.errorExpected(base.Pos(), "(unqualified) identifier")
- par.List = []*ast.Field{{Type: &ast.BadExpr{recv.Pos(), recv.End()}}}
- }
-
- return par
-}
-
-func (p *parser) parseFuncDecl() *ast.FuncDecl {
- if p.trace {
- defer un(trace(p, "FunctionDecl"))
- }
-
- doc := p.leadComment
- pos := p.expect(token.FUNC)
- scope := ast.NewScope(p.topScope) // function scope
-
- var recv *ast.FieldList
- if p.tok == token.LPAREN {
- recv = p.parseReceiver(scope)
- }
-
- ident := p.parseIdent()
-
- params, results := p.parseSignature(scope)
-
- var body *ast.BlockStmt
- if p.tok == token.LBRACE {
- body = p.parseBody(scope)
- }
- p.expectSemi()
-
- decl := &ast.FuncDecl{doc, recv, ident, &ast.FuncType{pos, params, results}, body}
- if recv == nil {
- // Go spec: The scope of an identifier denoting a constant, type,
- // variable, or function (but not method) declared at top level
- // (outside any function) is the package block.
- //
- // init() functions cannot be referred to and there may
- // be more than one - don't put them in the pkgScope
- if ident.Name != "init" {
- p.declare(decl, p.pkgScope, ast.Fun, ident)
- }
- }
-
- return decl
-}
-
-func (p *parser) parseDecl() ast.Decl {
- if p.trace {
- defer un(trace(p, "Declaration"))
- }
-
- var f parseSpecFunction
- switch p.tok {
- case token.CONST:
- f = parseConstSpec
-
- case token.TYPE:
- f = parseTypeSpec
-
- case token.VAR:
- f = parseVarSpec
-
- case token.FUNC:
- return p.parseFuncDecl()
-
- default:
- pos := p.pos
- p.errorExpected(pos, "declaration")
- p.next() // make progress
- decl := &ast.BadDecl{pos, p.pos}
- return decl
- }
-
- return p.parseGenDecl(p.tok, f)
-}
-
-func (p *parser) parseDeclList() (list []ast.Decl) {
- if p.trace {
- defer un(trace(p, "DeclList"))
- }
-
- for p.tok != token.EOF {
- list = append(list, p.parseDecl())
- }
-
- return
-}
-
-// ----------------------------------------------------------------------------
-// Source files
-
-func (p *parser) parseFile() *ast.File {
- if p.trace {
- defer un(trace(p, "File"))
- }
-
- // package clause
- doc := p.leadComment
- pos := p.expect(token.PACKAGE)
- // Go spec: The package clause is not a declaration;
- // the package name does not appear in any scope.
- ident := p.parseIdent()
- if ident.Name == "_" {
- p.error(p.pos, "invalid package name _")
- }
- p.expectSemi()
-
- var decls []ast.Decl
-
- // Don't bother parsing the rest if we had errors already.
- // Likely not a Go source file at all.
-
- if p.ErrorCount() == 0 && p.mode&PackageClauseOnly == 0 {
- // import decls
- for p.tok == token.IMPORT {
- decls = append(decls, p.parseGenDecl(token.IMPORT, parseImportSpec))
- }
-
- if p.mode&ImportsOnly == 0 {
- // rest of package body
- for p.tok != token.EOF {
- decls = append(decls, p.parseDecl())
- }
- }
- }
-
- assert(p.topScope == p.pkgScope, "imbalanced scopes")
-
- // resolve global identifiers within the same file
- i := 0
- for _, ident := range p.unresolved {
- // i <= index for current ident
- assert(ident.Obj == unresolved, "object already resolved")
- ident.Obj = p.pkgScope.Lookup(ident.Name) // also removes unresolved sentinel
- if ident.Obj == nil {
- p.unresolved[i] = ident
- i++
- }
- }
-
- // TODO(gri): store p.imports in AST
- return &ast.File{doc, pos, ident, decls, p.pkgScope, p.imports, p.unresolved[0:i], p.comments}
-}