Diffstat (limited to 'src/pkg/go/parser')
-rw-r--r--   src/pkg/go/parser/Makefile                  12
-rw-r--r--   src/pkg/go/parser/error_test.go            166
-rw-r--r--   src/pkg/go/parser/example_test.go           34
-rw-r--r--   src/pkg/go/parser/interface.go             183
-rw-r--r--   src/pkg/go/parser/parser.go                628
-rw-r--r--   src/pkg/go/parser/parser_test.go           193
-rw-r--r--   src/pkg/go/parser/short_test.go             75
-rw-r--r--   src/pkg/go/parser/testdata/commas.src       19
-rw-r--r--   src/pkg/go/parser/testdata/issue3106.src    46
9 files changed, 932 insertions, 424 deletions
diff --git a/src/pkg/go/parser/Makefile b/src/pkg/go/parser/Makefile
deleted file mode 100644
index d301f41eb..000000000
--- a/src/pkg/go/parser/Makefile
+++ /dev/null
@@ -1,12 +0,0 @@
-# Copyright 2009 The Go Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file.
-
-include ../../../Make.inc
-
-TARG=go/parser
-GOFILES=\
- interface.go\
- parser.go\
-
-include ../../../Make.pkg
diff --git a/src/pkg/go/parser/error_test.go b/src/pkg/go/parser/error_test.go
new file mode 100644
index 000000000..377c8b80c
--- /dev/null
+++ b/src/pkg/go/parser/error_test.go
@@ -0,0 +1,166 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file implements a parser test harness. The files in the testdata
+// directory are parsed and the errors reported are compared against the
+// error messages expected in the test files. The test files must end in
+// .src rather than .go so that they are not disturbed by gofmt runs.
+//
+// Expected errors are indicated in the test files by putting a comment
+// of the form /* ERROR "rx" */ immediately following an offending token.
+// The harness will verify that an error matching the regular expression
+// rx is reported at that source position.
+//
+// For instance, the following test file indicates that a "not declared"
+// error should be reported for the undeclared variable x:
+//
+// package p
+// func f() {
+// _ = x /* ERROR "not declared" */ + 1
+// }
+
+package parser
+
+import (
+ "go/scanner"
+ "go/token"
+ "io/ioutil"
+ "path/filepath"
+ "regexp"
+ "strings"
+ "testing"
+)
+
+const testdata = "testdata"
+
+// getFile assumes that each filename occurs at most once
+func getFile(filename string) (file *token.File) {
+ fset.Iterate(func(f *token.File) bool {
+ if f.Name() == filename {
+ if file != nil {
+ panic(filename + " used multiple times")
+ }
+ file = f
+ }
+ return true
+ })
+ return file
+}
+
+func getPos(filename string, offset int) token.Pos {
+ if f := getFile(filename); f != nil {
+ return f.Pos(offset)
+ }
+ return token.NoPos
+}
+
+// ERROR comments must be of the form /* ERROR "rx" */ and rx is
+// a regular expression that matches the expected error message.
+//
+var errRx = regexp.MustCompile(`^/\* *ERROR *"([^"]*)" *\*/$`)
+
+// expectedErrors collects the regular expressions of ERROR comments found
+// in files and returns them as a map of error positions to error messages.
+//
+func expectedErrors(t *testing.T, filename string, src []byte) map[token.Pos]string {
+ errors := make(map[token.Pos]string)
+
+ var s scanner.Scanner
+ // file was parsed already - do not add it again to the file
+ // set; otherwise the position information returned here will
+ // not match the position information collected by the parser
+ s.Init(getFile(filename), src, nil, scanner.ScanComments)
+ var prev token.Pos // position of last non-comment, non-semicolon token
+
+ for {
+ pos, tok, lit := s.Scan()
+ switch tok {
+ case token.EOF:
+ return errors
+ case token.COMMENT:
+ s := errRx.FindStringSubmatch(lit)
+ if len(s) == 2 {
+ errors[prev] = string(s[1])
+ }
+ default:
+ prev = pos
+ }
+ }
+
+ panic("unreachable")
+}
+
+// compareErrors compares the map of expected error messages with the list
+// of found errors and reports discrepancies.
+//
+func compareErrors(t *testing.T, expected map[token.Pos]string, found scanner.ErrorList) {
+ for _, error := range found {
+ // error.Pos is a token.Position, but we want
+ // a token.Pos so we can do a map lookup
+ pos := getPos(error.Pos.Filename, error.Pos.Offset)
+ if msg, found := expected[pos]; found {
+ // we expect a message at pos; check if it matches
+ rx, err := regexp.Compile(msg)
+ if err != nil {
+ t.Errorf("%s: %v", error.Pos, err)
+ continue
+ }
+ if match := rx.MatchString(error.Msg); !match {
+ t.Errorf("%s: %q does not match %q", error.Pos, error.Msg, msg)
+ continue
+ }
+ // we have a match - eliminate this error
+ delete(expected, pos)
+ } else {
+ // To keep in mind when analyzing failed test output:
+ // If the same error position occurs multiple times in errors,
+ // this message will be triggered (because the first error at
+ // the position removes this position from the expected errors).
+ t.Errorf("%s: unexpected error: %s", error.Pos, error.Msg)
+ }
+ }
+
+ // there should be no expected errors left
+ if len(expected) > 0 {
+ t.Errorf("%d errors not reported:", len(expected))
+ for pos, msg := range expected {
+ t.Errorf("%s: %s\n", fset.Position(pos), msg)
+ }
+ }
+}
+
+func checkErrors(t *testing.T, filename string, input interface{}) {
+ src, err := readSource(filename, input)
+ if err != nil {
+ t.Error(err)
+ return
+ }
+
+ _, err = ParseFile(fset, filename, src, DeclarationErrors)
+ found, ok := err.(scanner.ErrorList)
+ if err != nil && !ok {
+ t.Error(err)
+ return
+ }
+
+ // we are expecting the following errors
+ // (collect them after parsing the file so that it can be found in the file set)
+ expected := expectedErrors(t, filename, src)
+
+ // verify errors returned by the parser
+ compareErrors(t, expected, found)
+}
+
+func TestErrors(t *testing.T) {
+ list, err := ioutil.ReadDir(testdata)
+ if err != nil {
+ t.Fatal(err)
+ }
+ for _, fi := range list {
+ name := fi.Name()
+ if !fi.IsDir() && !strings.HasPrefix(name, ".") && strings.HasSuffix(name, ".src") {
+ checkErrors(t, filepath.Join(testdata, name), nil)
+ }
+ }
+}
diff --git a/src/pkg/go/parser/example_test.go b/src/pkg/go/parser/example_test.go
new file mode 100644
index 000000000..3c58e63a9
--- /dev/null
+++ b/src/pkg/go/parser/example_test.go
@@ -0,0 +1,34 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package parser_test
+
+import (
+ "fmt"
+ "go/parser"
+ "go/token"
+)
+
+func ExampleParseFile() {
+ fset := token.NewFileSet() // positions are relative to fset
+
+ // Parse the file containing this very example
+ // but stop after processing the imports.
+ f, err := parser.ParseFile(fset, "example_test.go", nil, parser.ImportsOnly)
+ if err != nil {
+ fmt.Println(err)
+ return
+ }
+
+ // Print the imports from the file's AST.
+ for _, s := range f.Imports {
+ fmt.Println(s.Path.Value)
+ }
+
+ // output:
+ //
+ // "fmt"
+ // "go/parser"
+ // "go/token"
+}
diff --git a/src/pkg/go/parser/interface.go b/src/pkg/go/parser/interface.go
index 4f980fc65..5c203a784 100644
--- a/src/pkg/go/parser/interface.go
+++ b/src/pkg/go/parser/interface.go
@@ -8,8 +8,8 @@ package parser
import (
"bytes"
+ "errors"
"go/ast"
- "go/scanner"
"go/token"
"io"
"io/ioutil"
@@ -21,7 +21,7 @@ import (
// otherwise it returns an error. If src == nil, readSource returns
// the result of reading the file specified by filename.
//
-func readSource(filename string, src interface{}) ([]byte, os.Error) {
+func readSource(filename string, src interface{}) ([]byte, error) {
if src != nil {
switch s := src.(type) {
case string:
@@ -35,86 +35,30 @@ func readSource(filename string, src interface{}) ([]byte, os.Error) {
}
case io.Reader:
var buf bytes.Buffer
- _, err := io.Copy(&buf, s)
- if err != nil {
+ if _, err := io.Copy(&buf, s); err != nil {
return nil, err
}
return buf.Bytes(), nil
- default:
- return nil, os.NewError("invalid source")
}
+ return nil, errors.New("invalid source")
}
-
return ioutil.ReadFile(filename)
}
-func (p *parser) errors() os.Error {
- mode := scanner.Sorted
- if p.mode&SpuriousErrors == 0 {
- mode = scanner.NoMultiples
- }
- return p.GetError(mode)
-}
-
-// ParseExpr parses a Go expression and returns the corresponding
-// AST node. The fset, filename, and src arguments have the same interpretation
-// as for ParseFile. If there is an error, the result expression
-// may be nil or contain a partial AST.
-//
-func ParseExpr(fset *token.FileSet, filename string, src interface{}) (ast.Expr, os.Error) {
- data, err := readSource(filename, src)
- if err != nil {
- return nil, err
- }
-
- var p parser
- p.init(fset, filename, data, 0)
- x := p.parseRhs()
- if p.tok == token.SEMICOLON {
- p.next() // consume automatically inserted semicolon, if any
- }
- p.expect(token.EOF)
-
- return x, p.errors()
-}
-
-// ParseStmtList parses a list of Go statements and returns the list
-// of corresponding AST nodes. The fset, filename, and src arguments have the same
-// interpretation as for ParseFile. If there is an error, the node
-// list may be nil or contain partial ASTs.
+// A Mode value is a set of flags (or 0).
+// They control the amount of source code parsed and other optional
+// parser functionality.
//
-func ParseStmtList(fset *token.FileSet, filename string, src interface{}) ([]ast.Stmt, os.Error) {
- data, err := readSource(filename, src)
- if err != nil {
- return nil, err
- }
-
- var p parser
- p.init(fset, filename, data, 0)
- list := p.parseStmtList()
- p.expect(token.EOF)
-
- return list, p.errors()
-}
-
-// ParseDeclList parses a list of Go declarations and returns the list
-// of corresponding AST nodes. The fset, filename, and src arguments have the same
-// interpretation as for ParseFile. If there is an error, the node
-// list may be nil or contain partial ASTs.
-//
-func ParseDeclList(fset *token.FileSet, filename string, src interface{}) ([]ast.Decl, os.Error) {
- data, err := readSource(filename, src)
- if err != nil {
- return nil, err
- }
-
- var p parser
- p.init(fset, filename, data, 0)
- list := p.parseDeclList()
- p.expect(token.EOF)
-
- return list, p.errors()
-}
+type Mode uint
+
+const (
+ PackageClauseOnly Mode = 1 << iota // parsing stops after package clause
+ ImportsOnly // parsing stops after import declarations
+ ParseComments // parse comments and add them to AST
+ Trace // print a trace of parsed productions
+ DeclarationErrors // report declaration errors
+ SpuriousErrors // report all (not just the first) errors per line
+)
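
Aside (not part of this change): the Mode constants above are bit flags and combine with the | operator. A minimal sketch, assuming the standard go/parser and go/token imports and a hypothetical file name:

package main

import (
	"fmt"
	"go/parser"
	"go/token"
)

func main() {
	const src = "package p\n\n// f does nothing.\nfunc f() {}\n"
	// Keep comments in the AST and report declaration errors in one pass.
	mode := parser.ParseComments | parser.DeclarationErrors
	f, err := parser.ParseFile(token.NewFileSet(), "input.go", src, mode)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(len(f.Comments)) // 1: the doc comment was retained
}
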
// ParseFile parses the source code of a single Go source file and returns
// the corresponding ast.File node. The source code may be provided via
@@ -123,7 +67,6 @@ func ParseDeclList(fset *token.FileSet, filename string, src interface{}) ([]ast
// If src != nil, ParseFile parses the source from src and the filename is
// only used when recording position information. The type of the argument
// for the src parameter must be string, []byte, or io.Reader.
-//
// If src == nil, ParseFile parses the file specified by filename.
//
// The mode parameter controls the amount of source text parsed and other
@@ -132,49 +75,30 @@ func ParseDeclList(fset *token.FileSet, filename string, src interface{}) ([]ast
//
// If the source couldn't be read, the returned AST is nil and the error
// indicates the specific failure. If the source was read but syntax
-// errors were found, the result is a partial AST (with ast.BadX nodes
+// errors were found, the result is a partial AST (with ast.Bad* nodes
// representing the fragments of erroneous source code). Multiple errors
// are returned via a scanner.ErrorList which is sorted by file position.
//
-func ParseFile(fset *token.FileSet, filename string, src interface{}, mode uint) (*ast.File, os.Error) {
- data, err := readSource(filename, src)
+func ParseFile(fset *token.FileSet, filename string, src interface{}, mode Mode) (*ast.File, error) {
+ // get source
+ text, err := readSource(filename, src)
if err != nil {
return nil, err
}
+ // parse source
var p parser
- p.init(fset, filename, data, mode)
- file := p.parseFile() // parseFile reads to EOF
+ p.init(fset, filename, text, mode)
+ f := p.parseFile()
- return file, p.errors()
-}
-
-// ParseFiles calls ParseFile for each file in the filenames list and returns
-// a map of package name -> package AST with all the packages found. The mode
-// bits are passed to ParseFile unchanged. Position information is recorded
-// in the file set fset.
-//
-// Files with parse errors are ignored. In this case the map of packages may
-// be incomplete (missing packages and/or incomplete packages) and the first
-// error encountered is returned.
-//
-func ParseFiles(fset *token.FileSet, filenames []string, mode uint) (pkgs map[string]*ast.Package, first os.Error) {
- pkgs = make(map[string]*ast.Package)
- for _, filename := range filenames {
- if src, err := ParseFile(fset, filename, nil, mode); err == nil {
- name := src.Name.Name
- pkg, found := pkgs[name]
- if !found {
- // TODO(gri) Use NewPackage here; reconsider ParseFiles API.
- pkg = &ast.Package{name, nil, nil, make(map[string]*ast.File)}
- pkgs[name] = pkg
- }
- pkg.Files[filename] = src
- } else if first == nil {
- first = err
- }
+ // sort errors
+ if p.mode&SpuriousErrors == 0 {
+ p.errors.RemoveMultiples()
+ } else {
+ p.errors.Sort()
}
- return
+
+ return f, p.errors.Err()
}
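
A consequence of the errors plumbing above, sketched for illustration (file name and source are hypothetical): when syntax errors were found, the error returned by ParseFile is a scanner.ErrorList, so callers can type-assert to reach the individual errors, just as error_test.go earlier in this change does.

package main

import (
	"fmt"
	"go/parser"
	"go/scanner"
	"go/token"
)

func main() {
	const src = "package p\nfunc f() { var x int\n" // closing braces missing
	_, err := parser.ParseFile(token.NewFileSet(), "broken.go", src, parser.DeclarationErrors)
	if list, ok := err.(scanner.ErrorList); ok {
		for _, e := range list {
			fmt.Println(e.Pos, e.Msg) // each entry carries a token.Position and a message
		}
	}
}
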
// ParseDir calls ParseFile for the files in the directory specified by path and
@@ -185,9 +109,9 @@ func ParseFiles(fset *token.FileSet, filenames []string, mode uint) (pkgs map[st
//
// If the directory couldn't be read, a nil map and the respective error are
// returned. If a parse error occurred, a non-nil but incomplete map and the
-// error are returned.
+// first error encountered are returned.
//
-func ParseDir(fset *token.FileSet, path string, filter func(*os.FileInfo) bool, mode uint) (map[string]*ast.Package, os.Error) {
+func ParseDir(fset *token.FileSet, path string, filter func(os.FileInfo) bool, mode Mode) (pkgs map[string]*ast.Package, first error) {
fd, err := os.Open(path)
if err != nil {
return nil, err
@@ -199,16 +123,41 @@ func ParseDir(fset *token.FileSet, path string, filter func(*os.FileInfo) bool,
return nil, err
}
- filenames := make([]string, len(list))
- n := 0
- for i := 0; i < len(list); i++ {
- d := &list[i]
+ pkgs = make(map[string]*ast.Package)
+ for _, d := range list {
if filter == nil || filter(d) {
- filenames[n] = filepath.Join(path, d.Name)
- n++
+ filename := filepath.Join(path, d.Name())
+ if src, err := ParseFile(fset, filename, nil, mode); err == nil {
+ name := src.Name.Name
+ pkg, found := pkgs[name]
+ if !found {
+ pkg = &ast.Package{
+ Name: name,
+ Files: make(map[string]*ast.File),
+ }
+ pkgs[name] = pkg
+ }
+ pkg.Files[filename] = src
+ } else if first == nil {
+ first = err
+ }
}
}
- filenames = filenames[0:n]
- return ParseFiles(fset, filenames, mode)
+ return
+}
+
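For illustration (directory path and filter are hypothetical, not taken from this change), ParseDir's filter now receives an os.FileInfo value rather than a pointer; a sketch that skips _test.go files:

package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"os"
	"strings"
)

func main() {
	notTest := func(fi os.FileInfo) bool {
		return !strings.HasSuffix(fi.Name(), "_test.go")
	}
	pkgs, err := parser.ParseDir(token.NewFileSet(), "src/pkg/go/parser", notTest, parser.PackageClauseOnly)
	if err != nil {
		fmt.Println("first error:", err) // the first parse (or directory) error
	}
	for name, pkg := range pkgs {
		fmt.Println(name, "->", len(pkg.Files), "files")
	}
}
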
+// ParseExpr is a convenience function for obtaining the AST of an expression x.
+// The position information recorded in the AST is undefined.
+//
+func ParseExpr(x string) (ast.Expr, error) {
+ // parse x within the context of a complete package for correct scopes;
+ // use //line directive for correct positions in error messages and put
+ // x alone on a separate line (handles line comments), followed by a ';'
+ // to force an error if the expression is incomplete
+ file, err := ParseFile(token.NewFileSet(), "", "package p;func _(){_=\n//line :1\n"+x+"\n;}", 0)
+ if err != nil {
+ return nil, err
+ }
+ return file.Decls[0].(*ast.FuncDecl).Body.List[0].(*ast.AssignStmt).Rhs[0], nil
}
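
A sketch of the new, simplified ParseExpr (no file set or filename arguments; positions in the result are not meaningful, per the comment above):

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
)

func main() {
	x, err := parser.ParseExpr("(a + b) * f(c)")
	if err != nil {
		fmt.Println(err)
		return
	}
	// The outermost node is the binary '*' expression.
	fmt.Println(x.(*ast.BinaryExpr).Op) // *
}
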
diff --git a/src/pkg/go/parser/parser.go b/src/pkg/go/parser/parser.go
index 9c14d1667..e362e13a7 100644
--- a/src/pkg/go/parser/parser.go
+++ b/src/pkg/go/parser/parser.go
@@ -14,29 +14,19 @@ import (
"go/ast"
"go/scanner"
"go/token"
-)
-
-// The mode parameter to the Parse* functions is a set of flags (or 0).
-// They control the amount of source code parsed and other optional
-// parser functionality.
-//
-const (
- PackageClauseOnly uint = 1 << iota // parsing stops after package clause
- ImportsOnly // parsing stops after import declarations
- ParseComments // parse comments and add them to AST
- Trace // print a trace of parsed productions
- DeclarationErrors // report declaration errors
- SpuriousErrors // report all (not just the first) errors per line
+ "strconv"
+ "strings"
+ "unicode"
)
// The parser structure holds the parser's internal state.
type parser struct {
- file *token.File
- scanner.ErrorVector
+ file *token.File
+ errors scanner.ErrorList
scanner scanner.Scanner
// Tracing/debugging
- mode uint // parsing mode
+ mode Mode // parsing mode
trace bool // == (mode & Trace != 0)
indent uint // indentation used for tracing output
@@ -50,6 +40,13 @@ type parser struct {
tok token.Token // one token look-ahead
lit string // token literal
+ // Error recovery
+ // (used to limit the number of calls to syncXXX functions
+ // w/o making scanning progress - avoids potential endless
+ // loops across multiple parser functions during error recovery)
+ syncPos token.Pos // last synchronization position
+ syncCnt int // number of calls to syncXXX without progress
+
// Non-syntactic parser control
exprLev int // < 0: in control clause, >= 0: in expression
@@ -65,18 +62,14 @@ type parser struct {
targetStack [][]*ast.Ident // stack of unresolved labels
}
-// scannerMode returns the scanner mode bits given the parser's mode bits.
-func scannerMode(mode uint) uint {
- var m uint = scanner.InsertSemis
+func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode Mode) {
+ p.file = fset.AddFile(filename, fset.Base(), len(src))
+ var m scanner.Mode
if mode&ParseComments != 0 {
- m |= scanner.ScanComments
+ m = scanner.ScanComments
}
- return m
-}
-
-func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode uint) {
- p.file = fset.AddFile(filename, fset.Base(), len(src))
- p.scanner.Init(p.file, src, p, scannerMode(mode))
+ eh := func(pos token.Position, msg string) { p.errors.Add(pos, msg) }
+ p.scanner.Init(p.file, src, eh, m)
p.mode = mode
p.trace = mode&Trace != 0 // for convenience (p.trace is used frequently)
@@ -144,28 +137,31 @@ func (p *parser) declare(decl, data interface{}, scope *ast.Scope, kind ast.ObjK
}
}
-func (p *parser) shortVarDecl(idents []*ast.Ident) {
+func (p *parser) shortVarDecl(decl *ast.AssignStmt, list []ast.Expr) {
// Go spec: A short variable declaration may redeclare variables
// provided they were originally declared in the same block with
// the same type, and at least one of the non-blank variables is new.
n := 0 // number of new variables
- for _, ident := range idents {
- assert(ident.Obj == nil, "identifier already declared or resolved")
- obj := ast.NewObj(ast.Var, ident.Name)
- // short var declarations cannot have redeclaration errors
- // and are not global => no need to remember the respective
- // declaration
- ident.Obj = obj
- if ident.Name != "_" {
- if alt := p.topScope.Insert(obj); alt != nil {
- ident.Obj = alt // redeclaration
- } else {
- n++ // new declaration
+ for _, x := range list {
+ if ident, isIdent := x.(*ast.Ident); isIdent {
+ assert(ident.Obj == nil, "identifier already declared or resolved")
+ obj := ast.NewObj(ast.Var, ident.Name)
+ // remember corresponding assignment for other tools
+ obj.Decl = decl
+ ident.Obj = obj
+ if ident.Name != "_" {
+ if alt := p.topScope.Insert(obj); alt != nil {
+ ident.Obj = alt // redeclaration
+ } else {
+ n++ // new declaration
+ }
}
+ } else {
+ p.errorExpected(x.Pos(), "identifier")
}
}
if n == 0 && p.mode&DeclarationErrors != 0 {
- p.error(idents[0].Pos(), "no new variables on left side of :=")
+ p.error(list[0].Pos(), "no new variables on left side of :=")
}
}
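
To restate the rule this code enforces (an illustration, not part of the patch; f, g, h stand for arbitrary functions returning a value and an error):

	a, err := f() // declares a and err
	b, err := g() // ok: b is new, err is only redeclared
	err := h()    // error: no new variables on left side of :=

The new decl parameter additionally records the enclosing *ast.AssignStmt in obj.Decl, so tools can navigate from an identifier to the := that declared it.
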
@@ -263,7 +259,7 @@ func (p *parser) consumeComment() (comment *ast.Comment, endline int) {
}
}
- comment = &ast.Comment{p.pos, p.lit}
+ comment = &ast.Comment{Slash: p.pos, Text: p.lit}
p.next0()
return
@@ -284,7 +280,7 @@ func (p *parser) consumeCommentGroup() (comments *ast.CommentGroup, endline int)
}
// add comment group to the comments list
- comments = &ast.CommentGroup{list}
+ comments = &ast.CommentGroup{List: list}
p.comments = append(p.comments, comments)
return
@@ -341,7 +337,7 @@ func (p *parser) next() {
}
func (p *parser) error(pos token.Pos, msg string) {
- p.Error(p.file.Position(pos), msg)
+ p.errors.Add(p.file.Position(pos), msg)
}
func (p *parser) errorExpected(pos token.Pos, msg string) {
@@ -349,7 +345,7 @@ func (p *parser) errorExpected(pos token.Pos, msg string) {
if pos == p.pos {
// the error happened at the current position;
// make the error message more specific
- if p.tok == token.SEMICOLON && p.lit[0] == '\n' {
+ if p.tok == token.SEMICOLON && p.lit == "\n" {
msg += ", found newline"
} else {
msg += ", found '" + p.tok.String() + "'"
@@ -370,10 +366,39 @@ func (p *parser) expect(tok token.Token) token.Pos {
return pos
}
+// expectClosing is like expect but provides a better error message
+// for the common case of a missing comma before a newline.
+//
+func (p *parser) expectClosing(tok token.Token, context string) token.Pos {
+ if p.tok != tok && p.tok == token.SEMICOLON && p.lit == "\n" {
+ p.error(p.pos, "missing ',' before newline in "+context)
+ p.next()
+ }
+ return p.expect(tok)
+}
+
func (p *parser) expectSemi() {
+ // semicolon is optional before a closing ')' or '}'
if p.tok != token.RPAREN && p.tok != token.RBRACE {
- p.expect(token.SEMICOLON)
+ if p.tok == token.SEMICOLON {
+ p.next()
+ } else {
+ p.errorExpected(p.pos, "';'")
+ syncStmt(p)
+ }
+ }
+}
+
+func (p *parser) atComma(context string) bool {
+ if p.tok == token.COMMA {
+ return true
}
+ if p.tok == token.SEMICOLON && p.lit == "\n" {
+ p.error(p.pos, "missing ',' before newline in "+context)
+ return true // "insert" the comma and continue
+
+ }
+ return false
}
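
The kind of slip these two helpers (expectClosing and atComma) are aimed at, shown for illustration (not part of the patch):

	x := []int{
		1,
		2          // error: missing ',' before newline in composite literal
	}

The automatically inserted semicolon after 2 is treated as the missing comma and parsing continues, so the mistake yields one specific message instead of a generic expected '}' at the semicolon.
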
func assert(cond bool, msg string) {
@@ -382,6 +407,68 @@ func assert(cond bool, msg string) {
}
}
+// syncStmt advances to the next statement.
+// Used for synchronization after an error.
+//
+func syncStmt(p *parser) {
+ for {
+ switch p.tok {
+ case token.BREAK, token.CONST, token.CONTINUE, token.DEFER,
+ token.FALLTHROUGH, token.FOR, token.GO, token.GOTO,
+ token.IF, token.RETURN, token.SELECT, token.SWITCH,
+ token.TYPE, token.VAR:
+ // Return only if parser made some progress since last
+ // sync or if it has not reached 10 sync calls without
+ // progress. Otherwise consume at least one token to
+ // avoid an endless parser loop (it is possible that
+ // both parseOperand and parseStmt call syncStmt and
+ // correctly do not advance, thus the need for the
+ // invocation limit p.syncCnt).
+ if p.pos == p.syncPos && p.syncCnt < 10 {
+ p.syncCnt++
+ return
+ }
+ if p.pos > p.syncPos {
+ p.syncPos = p.pos
+ p.syncCnt = 0
+ return
+ }
+ // Reaching here indicates a parser bug, likely an
+ // incorrect token list in this function, but it only
+ // leads to skipping of possibly correct code if a
+ // previous error is present, and thus is preferred
+ // over a non-terminating parse.
+ case token.EOF:
+ return
+ }
+ p.next()
+ }
+}
+
+// syncDecl advances to the next declaration.
+// Used for synchronization after an error.
+//
+func syncDecl(p *parser) {
+ for {
+ switch p.tok {
+ case token.CONST, token.TYPE, token.VAR:
+ // see comments in syncStmt
+ if p.pos == p.syncPos && p.syncCnt < 10 {
+ p.syncCnt++
+ return
+ }
+ if p.pos > p.syncPos {
+ p.syncPos = p.pos
+ p.syncCnt = 0
+ return
+ }
+ case token.EOF:
+ return
+ }
+ p.next()
+ }
+}
+
// ----------------------------------------------------------------------------
// Identifiers
@@ -394,7 +481,7 @@ func (p *parser) parseIdent() *ast.Ident {
} else {
p.expect(token.IDENT) // use expect() error handling
}
- return &ast.Ident{pos, name, nil}
+ return &ast.Ident{NamePos: pos, Name: name}
}
func (p *parser) parseIdentList() (list []*ast.Ident) {
@@ -434,7 +521,9 @@ func (p *parser) parseLhsList() []ast.Expr {
switch p.tok {
case token.DEFINE:
// lhs of a short variable declaration
- p.shortVarDecl(p.makeIdentList(list))
+ // but doesn't enter scope until later:
+ // caller must call p.shortVarDecl(p.makeIdentList(list))
+ // at appropriate time.
case token.COLON:
// lhs of a label declaration or a communication clause of a select
// statement (parseLhsList is not called when parsing the case clause
@@ -470,7 +559,7 @@ func (p *parser) parseType() ast.Expr {
pos := p.pos
p.errorExpected(pos, "type")
p.next() // make progress
- return &ast.BadExpr{pos, p.pos}
+ return &ast.BadExpr{From: pos, To: p.pos}
}
return typ
@@ -490,7 +579,7 @@ func (p *parser) parseTypeName() ast.Expr {
p.next()
p.resolve(ident)
sel := p.parseIdent()
- return &ast.SelectorExpr{ident, sel}
+ return &ast.SelectorExpr{X: ident, Sel: sel}
}
return ident
@@ -504,7 +593,7 @@ func (p *parser) parseArrayType(ellipsisOk bool) ast.Expr {
lbrack := p.expect(token.LBRACK)
var len ast.Expr
if ellipsisOk && p.tok == token.ELLIPSIS {
- len = &ast.Ellipsis{p.pos, nil}
+ len = &ast.Ellipsis{Ellipsis: p.pos}
p.next()
} else if p.tok != token.RBRACK {
len = p.parseRhs()
@@ -512,7 +601,7 @@ func (p *parser) parseArrayType(ellipsisOk bool) ast.Expr {
p.expect(token.RBRACK)
elt := p.parseType()
- return &ast.ArrayType{lbrack, len, elt}
+ return &ast.ArrayType{Lbrack: lbrack, Len: len, Elt: elt}
}
func (p *parser) makeIdentList(list []ast.Expr) []*ast.Ident {
@@ -520,9 +609,11 @@ func (p *parser) makeIdentList(list []ast.Expr) []*ast.Ident {
for i, x := range list {
ident, isIdent := x.(*ast.Ident)
if !isIdent {
- pos := x.(ast.Expr).Pos()
- p.errorExpected(pos, "identifier")
- ident = &ast.Ident{pos, "_", nil}
+ if _, isBad := x.(*ast.BadExpr); !isBad {
+ // only report error if it's a new one
+ p.errorExpected(x.Pos(), "identifier")
+ }
+ ident = &ast.Ident{NamePos: x.Pos(), Name: "_"}
}
idents[i] = ident
}
@@ -542,7 +633,7 @@ func (p *parser) parseFieldDecl(scope *ast.Scope) *ast.Field {
// optional tag
var tag *ast.BasicLit
if p.tok == token.STRING {
- tag = &ast.BasicLit{p.pos, p.tok, p.lit}
+ tag = &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
p.next()
}
@@ -558,13 +649,13 @@ func (p *parser) parseFieldDecl(scope *ast.Scope) *ast.Field {
if n := len(list); n > 1 || !isTypeName(deref(typ)) {
pos := typ.Pos()
p.errorExpected(pos, "anonymous field")
- typ = &ast.BadExpr{pos, list[n-1].End()}
+ typ = &ast.BadExpr{From: pos, To: list[n-1].End()}
}
}
p.expectSemi() // call before accessing p.linecomment
- field := &ast.Field{doc, idents, typ, tag, p.lineComment}
+ field := &ast.Field{Doc: doc, Names: idents, Type: typ, Tag: tag, Comment: p.lineComment}
p.declare(field, nil, scope, ast.Var, idents...)
return field
@@ -587,8 +678,14 @@ func (p *parser) parseStructType() *ast.StructType {
}
rbrace := p.expect(token.RBRACE)
- // TODO(gri): store struct scope in AST
- return &ast.StructType{pos, &ast.FieldList{lbrace, list, rbrace}, false}
+ return &ast.StructType{
+ Struct: pos,
+ Fields: &ast.FieldList{
+ Opening: lbrace,
+ List: list,
+ Closing: rbrace,
+ },
+ }
}
func (p *parser) parsePointerType() *ast.StarExpr {
@@ -599,7 +696,7 @@ func (p *parser) parsePointerType() *ast.StarExpr {
star := p.expect(token.MUL)
base := p.parseType()
- return &ast.StarExpr{star, base}
+ return &ast.StarExpr{Star: star, X: base}
}
func (p *parser) tryVarType(isParam bool) ast.Expr {
@@ -609,12 +706,9 @@ func (p *parser) tryVarType(isParam bool) ast.Expr {
typ := p.tryIdentOrType(isParam) // don't use parseType so we can provide better error message
if typ == nil {
p.error(pos, "'...' parameter is missing type")
- typ = &ast.BadExpr{pos, p.pos}
+ typ = &ast.BadExpr{From: pos, To: p.pos}
}
- if p.tok != token.RPAREN {
- p.error(pos, "can use '...' with last parameter type only")
- }
- return &ast.Ellipsis{pos, typ}
+ return &ast.Ellipsis{Ellipsis: pos, Elt: typ}
}
return p.tryIdentOrType(false)
}
@@ -625,7 +719,7 @@ func (p *parser) parseVarType(isParam bool) ast.Expr {
pos := p.pos
p.errorExpected(pos, "type")
p.next() // make progress
- typ = &ast.BadExpr{pos, p.pos}
+ typ = &ast.BadExpr{From: pos, To: p.pos}
}
return typ
}
@@ -636,21 +730,21 @@ func (p *parser) parseVarList(isParam bool) (list []ast.Expr, typ ast.Expr) {
}
// a list of identifiers looks like a list of type names
- for {
- // parseVarType accepts any type (including parenthesized ones)
- // even though the syntax does not permit them here: we
- // accept them all for more robust parsing and complain
- // afterwards
- list = append(list, p.parseVarType(isParam))
+ //
+ // parse/tryVarType accepts any type (including parenthesized
+ // ones) even though the syntax does not permit them here: we
+ // accept them all for more robust parsing and complain later
+ for typ := p.parseVarType(isParam); typ != nil; {
+ list = append(list, typ)
if p.tok != token.COMMA {
break
}
p.next()
+ typ = p.tryVarType(isParam) // maybe nil as in: func f(int,) {}
}
// if we had a list of identifiers, it must be followed by a type
- typ = p.tryVarType(isParam)
- if typ != nil {
+ if typ = p.tryVarType(isParam); typ != nil {
p.resolve(typ)
}
@@ -666,7 +760,7 @@ func (p *parser) parseParameterList(scope *ast.Scope, ellipsisOk bool) (params [
if typ != nil {
// IdentifierList Type
idents := p.makeIdentList(list)
- field := &ast.Field{nil, idents, typ, nil, nil}
+ field := &ast.Field{Names: idents, Type: typ}
params = append(params, field)
// Go spec: The scope of an identifier denoting a function
// parameter or result variable is the function body.
@@ -678,12 +772,12 @@ func (p *parser) parseParameterList(scope *ast.Scope, ellipsisOk bool) (params [
for p.tok != token.RPAREN && p.tok != token.EOF {
idents := p.parseIdentList()
typ := p.parseVarType(ellipsisOk)
- field := &ast.Field{nil, idents, typ, nil, nil}
+ field := &ast.Field{Names: idents, Type: typ}
params = append(params, field)
// Go spec: The scope of an identifier denoting a function
// parameter or result variable is the function body.
p.declare(field, nil, scope, ast.Var, idents...)
- if p.tok != token.COMMA {
+ if !p.atComma("parameter list") {
break
}
p.next()
@@ -713,7 +807,7 @@ func (p *parser) parseParameters(scope *ast.Scope, ellipsisOk bool) *ast.FieldLi
}
rparen := p.expect(token.RPAREN)
- return &ast.FieldList{lparen, params, rparen}
+ return &ast.FieldList{Opening: lparen, List: params, Closing: rparen}
}
func (p *parser) parseResult(scope *ast.Scope) *ast.FieldList {
@@ -755,7 +849,7 @@ func (p *parser) parseFuncType() (*ast.FuncType, *ast.Scope) {
scope := ast.NewScope(p.topScope) // function scope
params, results := p.parseSignature(scope)
- return &ast.FuncType{pos, params, results}, scope
+ return &ast.FuncType{Func: pos, Params: params, Results: results}, scope
}
func (p *parser) parseMethodSpec(scope *ast.Scope) *ast.Field {
@@ -772,7 +866,7 @@ func (p *parser) parseMethodSpec(scope *ast.Scope) *ast.Field {
idents = []*ast.Ident{ident}
scope := ast.NewScope(nil) // method scope
params, results := p.parseSignature(scope)
- typ = &ast.FuncType{token.NoPos, params, results}
+ typ = &ast.FuncType{Func: token.NoPos, Params: params, Results: results}
} else {
// embedded interface
typ = x
@@ -780,7 +874,7 @@ func (p *parser) parseMethodSpec(scope *ast.Scope) *ast.Field {
}
p.expectSemi() // call before accessing p.linecomment
- spec := &ast.Field{doc, idents, typ, nil, p.lineComment}
+ spec := &ast.Field{Doc: doc, Names: idents, Type: typ, Comment: p.lineComment}
p.declare(spec, nil, scope, ast.Fun, idents...)
return spec
@@ -800,8 +894,14 @@ func (p *parser) parseInterfaceType() *ast.InterfaceType {
}
rbrace := p.expect(token.RBRACE)
- // TODO(gri): store interface scope in AST
- return &ast.InterfaceType{pos, &ast.FieldList{lbrace, list, rbrace}, false}
+ return &ast.InterfaceType{
+ Interface: pos,
+ Methods: &ast.FieldList{
+ Opening: lbrace,
+ List: list,
+ Closing: rbrace,
+ },
+ }
}
func (p *parser) parseMapType() *ast.MapType {
@@ -815,7 +915,7 @@ func (p *parser) parseMapType() *ast.MapType {
p.expect(token.RBRACK)
value := p.parseType()
- return &ast.MapType{pos, key, value}
+ return &ast.MapType{Map: pos, Key: key, Value: value}
}
func (p *parser) parseChanType() *ast.ChanType {
@@ -838,7 +938,7 @@ func (p *parser) parseChanType() *ast.ChanType {
}
value := p.parseType()
- return &ast.ChanType{pos, dir, value}
+ return &ast.ChanType{Begin: pos, Dir: dir, Value: value}
}
// If the result is an identifier, it is not resolved.
@@ -866,7 +966,7 @@ func (p *parser) tryIdentOrType(ellipsisOk bool) ast.Expr {
p.next()
typ := p.parseType()
rparen := p.expect(token.RPAREN)
- return &ast.ParenExpr{lparen, typ, rparen}
+ return &ast.ParenExpr{Lparen: lparen, X: typ, Rparen: rparen}
}
// no type found
@@ -909,7 +1009,7 @@ func (p *parser) parseBody(scope *ast.Scope) *ast.BlockStmt {
p.closeScope()
rbrace := p.expect(token.RBRACE)
- return &ast.BlockStmt{lbrace, list, rbrace}
+ return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
}
func (p *parser) parseBlockStmt() *ast.BlockStmt {
@@ -923,7 +1023,7 @@ func (p *parser) parseBlockStmt() *ast.BlockStmt {
p.closeScope()
rbrace := p.expect(token.RBRACE)
- return &ast.BlockStmt{lbrace, list, rbrace}
+ return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
}
// ----------------------------------------------------------------------------
@@ -944,7 +1044,7 @@ func (p *parser) parseFuncTypeOrLit() ast.Expr {
body := p.parseBody(scope)
p.exprLev--
- return &ast.FuncLit{typ, body}
+ return &ast.FuncLit{Type: typ, Body: body}
}
// parseOperand may return an expression or a raw type (incl. array
@@ -965,7 +1065,7 @@ func (p *parser) parseOperand(lhs bool) ast.Expr {
return x
case token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING:
- x := &ast.BasicLit{p.pos, p.tok, p.lit}
+ x := &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
p.next()
return x
@@ -976,24 +1076,24 @@ func (p *parser) parseOperand(lhs bool) ast.Expr {
x := p.parseRhsOrType() // types may be parenthesized: (some type)
p.exprLev--
rparen := p.expect(token.RPAREN)
- return &ast.ParenExpr{lparen, x, rparen}
+ return &ast.ParenExpr{Lparen: lparen, X: x, Rparen: rparen}
case token.FUNC:
return p.parseFuncTypeOrLit()
+ }
- default:
- if typ := p.tryIdentOrType(true); typ != nil {
- // could be type for composite literal or conversion
- _, isIdent := typ.(*ast.Ident)
- assert(!isIdent, "type cannot be identifier")
- return typ
- }
+ if typ := p.tryIdentOrType(true); typ != nil {
+ // could be type for composite literal or conversion
+ _, isIdent := typ.(*ast.Ident)
+ assert(!isIdent, "type cannot be identifier")
+ return typ
}
+ // we have an error
pos := p.pos
p.errorExpected(pos, "operand")
- p.next() // make progress
- return &ast.BadExpr{pos, p.pos}
+ syncStmt(p)
+ return &ast.BadExpr{From: pos, To: p.pos}
}
func (p *parser) parseSelector(x ast.Expr) ast.Expr {
@@ -1003,7 +1103,7 @@ func (p *parser) parseSelector(x ast.Expr) ast.Expr {
sel := p.parseIdent()
- return &ast.SelectorExpr{x, sel}
+ return &ast.SelectorExpr{X: x, Sel: sel}
}
func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr {
@@ -1021,7 +1121,7 @@ func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr {
}
p.expect(token.RPAREN)
- return &ast.TypeAssertExpr{x, typ}
+ return &ast.TypeAssertExpr{X: x, Type: typ}
}
func (p *parser) parseIndexOrSlice(x ast.Expr) ast.Expr {
@@ -1047,9 +1147,9 @@ func (p *parser) parseIndexOrSlice(x ast.Expr) ast.Expr {
rbrack := p.expect(token.RBRACK)
if isSlice {
- return &ast.SliceExpr{x, lbrack, low, high, rbrack}
+ return &ast.SliceExpr{X: x, Lbrack: lbrack, Low: low, High: high, Rbrack: rbrack}
}
- return &ast.IndexExpr{x, lbrack, low, rbrack}
+ return &ast.IndexExpr{X: x, Lbrack: lbrack, Index: low, Rbrack: rbrack}
}
func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr {
@@ -1067,15 +1167,15 @@ func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr {
ellipsis = p.pos
p.next()
}
- if p.tok != token.COMMA {
+ if !p.atComma("argument list") {
break
}
p.next()
}
p.exprLev--
- rparen := p.expect(token.RPAREN)
+ rparen := p.expectClosing(token.RPAREN, "argument list")
- return &ast.CallExpr{fun, lparen, list, ellipsis, rparen}
+ return &ast.CallExpr{Fun: fun, Lparen: lparen, Args: list, Ellipsis: ellipsis, Rparen: rparen}
}
func (p *parser) parseElement(keyOk bool) ast.Expr {
@@ -1092,7 +1192,7 @@ func (p *parser) parseElement(keyOk bool) ast.Expr {
if p.tok == token.COLON {
colon := p.pos
p.next()
- return &ast.KeyValueExpr{x, colon, p.parseElement(false)}
+ return &ast.KeyValueExpr{Key: x, Colon: colon, Value: p.parseElement(false)}
}
p.resolve(x) // not a map key
}
@@ -1107,7 +1207,7 @@ func (p *parser) parseElementList() (list []ast.Expr) {
for p.tok != token.RBRACE && p.tok != token.EOF {
list = append(list, p.parseElement(true))
- if p.tok != token.COMMA {
+ if !p.atComma("composite literal") {
break
}
p.next()
@@ -1128,13 +1228,13 @@ func (p *parser) parseLiteralValue(typ ast.Expr) ast.Expr {
elts = p.parseElementList()
}
p.exprLev--
- rbrace := p.expect(token.RBRACE)
- return &ast.CompositeLit{typ, lbrace, elts, rbrace}
+ rbrace := p.expectClosing(token.RBRACE, "composite literal")
+ return &ast.CompositeLit{Type: typ, Lbrace: lbrace, Elts: elts, Rbrace: rbrace}
}
// checkExpr checks that x is an expression (and not a type).
func (p *parser) checkExpr(x ast.Expr) ast.Expr {
- switch t := unparen(x).(type) {
+ switch unparen(x).(type) {
case *ast.BadExpr:
case *ast.Ident:
case *ast.BasicLit:
@@ -1158,7 +1258,7 @@ func (p *parser) checkExpr(x ast.Expr) ast.Expr {
default:
// all other nodes are not proper expressions
p.errorExpected(x.Pos(), "expression")
- x = &ast.BadExpr{x.Pos(), x.End()}
+ x = &ast.BadExpr{From: x.Pos(), To: x.End()}
}
return x
}
@@ -1221,7 +1321,7 @@ func (p *parser) checkExprOrType(x ast.Expr) ast.Expr {
case *ast.ArrayType:
if len, isEllipsis := t.Len.(*ast.Ellipsis); isEllipsis {
p.error(len.Pos(), "expected array length, found '...'")
- x = &ast.BadExpr{x.Pos(), x.End()}
+ x = &ast.BadExpr{From: x.Pos(), To: x.End()}
}
}
@@ -1251,9 +1351,9 @@ L:
x = p.parseTypeAssertion(p.checkExpr(x))
default:
pos := p.pos
- p.next() // make progress
p.errorExpected(pos, "selector or type assertion")
- x = &ast.BadExpr{pos, p.pos}
+ p.next() // make progress
+ x = &ast.BadExpr{From: pos, To: p.pos}
}
case token.LBRACK:
if lhs {
@@ -1294,7 +1394,7 @@ func (p *parser) parseUnaryExpr(lhs bool) ast.Expr {
pos, op := p.pos, p.tok
p.next()
x := p.parseUnaryExpr(false)
- return &ast.UnaryExpr{pos, op, p.checkExpr(x)}
+ return &ast.UnaryExpr{OpPos: pos, Op: op, X: p.checkExpr(x)}
case token.ARROW:
// channel type or receive expression
@@ -1303,18 +1403,18 @@ func (p *parser) parseUnaryExpr(lhs bool) ast.Expr {
if p.tok == token.CHAN {
p.next()
value := p.parseType()
- return &ast.ChanType{pos, ast.RECV, value}
+ return &ast.ChanType{Begin: pos, Dir: ast.RECV, Value: value}
}
x := p.parseUnaryExpr(false)
- return &ast.UnaryExpr{pos, token.ARROW, p.checkExpr(x)}
+ return &ast.UnaryExpr{OpPos: pos, Op: token.ARROW, X: p.checkExpr(x)}
case token.MUL:
// pointer type or unary "*" expression
pos := p.pos
p.next()
x := p.parseUnaryExpr(false)
- return &ast.StarExpr{pos, p.checkExprOrType(x)}
+ return &ast.StarExpr{Star: pos, X: p.checkExprOrType(x)}
}
return p.parsePrimaryExpr(lhs)
@@ -1336,7 +1436,7 @@ func (p *parser) parseBinaryExpr(lhs bool, prec1 int) ast.Expr {
lhs = false
}
y := p.parseBinaryExpr(false, prec+1)
- x = &ast.BinaryExpr{p.checkExpr(x), pos, op, p.checkExpr(y)}
+ x = &ast.BinaryExpr{X: p.checkExpr(x), OpPos: pos, Op: op, Y: p.checkExpr(y)}
}
}
@@ -1398,12 +1498,16 @@ func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) {
if mode == rangeOk && p.tok == token.RANGE && (tok == token.DEFINE || tok == token.ASSIGN) {
pos := p.pos
p.next()
- y = []ast.Expr{&ast.UnaryExpr{pos, token.RANGE, p.parseRhs()}}
+ y = []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
isRange = true
} else {
y = p.parseRhsList()
}
- return &ast.AssignStmt{x, pos, tok, y}, isRange
+ as := &ast.AssignStmt{Lhs: x, TokPos: pos, Tok: tok, Rhs: y}
+ if tok == token.DEFINE {
+ p.shortVarDecl(as, x)
+ }
+ return as, isRange
}
if len(x) > 1 {
@@ -1420,7 +1524,7 @@ func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) {
// Go spec: The scope of a label is the body of the function
// in which it is declared and excludes the body of any nested
// function.
- stmt := &ast.LabeledStmt{label, colon, p.parseStmt()}
+ stmt := &ast.LabeledStmt{Label: label, Colon: colon, Stmt: p.parseStmt()}
p.declare(stmt, nil, p.labelScope, ast.Lbl, label)
return stmt, false
}
@@ -1431,24 +1535,24 @@ func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) {
// before the ':' that caused the problem. Thus, use the (latest) colon
// position for error reporting.
p.error(colon, "illegal label declaration")
- return &ast.BadStmt{x[0].Pos(), colon + 1}, false
+ return &ast.BadStmt{From: x[0].Pos(), To: colon + 1}, false
case token.ARROW:
// send statement
arrow := p.pos
- p.next() // consume "<-"
+ p.next()
y := p.parseRhs()
- return &ast.SendStmt{x[0], arrow, y}, false
+ return &ast.SendStmt{Chan: x[0], Arrow: arrow, Value: y}, false
case token.INC, token.DEC:
// increment or decrement
- s := &ast.IncDecStmt{x[0], p.pos, p.tok}
- p.next() // consume "++" or "--"
+ s := &ast.IncDecStmt{X: x[0], TokPos: p.pos, Tok: p.tok}
+ p.next()
return s, false
}
// expression
- return &ast.ExprStmt{x[0]}, false
+ return &ast.ExprStmt{X: x[0]}, false
}
func (p *parser) parseCallExpr() *ast.CallExpr {
@@ -1456,7 +1560,10 @@ func (p *parser) parseCallExpr() *ast.CallExpr {
if call, isCall := x.(*ast.CallExpr); isCall {
return call
}
- p.errorExpected(x.Pos(), "function/method call")
+ if _, isBad := x.(*ast.BadExpr); !isBad {
+ // only report error if it's a new one
+ p.errorExpected(x.Pos(), "function/method call")
+ }
return nil
}
@@ -1469,10 +1576,10 @@ func (p *parser) parseGoStmt() ast.Stmt {
call := p.parseCallExpr()
p.expectSemi()
if call == nil {
- return &ast.BadStmt{pos, pos + 2} // len("go")
+ return &ast.BadStmt{From: pos, To: pos + 2} // len("go")
}
- return &ast.GoStmt{pos, call}
+ return &ast.GoStmt{Go: pos, Call: call}
}
func (p *parser) parseDeferStmt() ast.Stmt {
@@ -1484,10 +1591,10 @@ func (p *parser) parseDeferStmt() ast.Stmt {
call := p.parseCallExpr()
p.expectSemi()
if call == nil {
- return &ast.BadStmt{pos, pos + 5} // len("defer")
+ return &ast.BadStmt{From: pos, To: pos + 5} // len("defer")
}
- return &ast.DeferStmt{pos, call}
+ return &ast.DeferStmt{Defer: pos, Call: call}
}
func (p *parser) parseReturnStmt() *ast.ReturnStmt {
@@ -1503,7 +1610,7 @@ func (p *parser) parseReturnStmt() *ast.ReturnStmt {
}
p.expectSemi()
- return &ast.ReturnStmt{pos, x}
+ return &ast.ReturnStmt{Return: pos, Results: x}
}
func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt {
@@ -1521,7 +1628,7 @@ func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt {
}
p.expectSemi()
- return &ast.BranchStmt{pos, tok, label}
+ return &ast.BranchStmt{TokPos: pos, Tok: tok, Label: label}
}
func (p *parser) makeExpr(s ast.Stmt) ast.Expr {
@@ -1532,7 +1639,7 @@ func (p *parser) makeExpr(s ast.Stmt) ast.Expr {
return p.checkExpr(es.X)
}
p.error(s.Pos(), "expected condition, found simple statement")
- return &ast.BadExpr{s.Pos(), s.End()}
+ return &ast.BadExpr{From: s.Pos(), To: s.End()}
}
func (p *parser) parseIfStmt() *ast.IfStmt {
@@ -1574,7 +1681,7 @@ func (p *parser) parseIfStmt() *ast.IfStmt {
p.expectSemi()
}
- return &ast.IfStmt{pos, s, x, body, else_}
+ return &ast.IfStmt{If: pos, Init: s, Cond: x, Body: body, Else: else_}
}
func (p *parser) parseTypeList() (list []ast.Expr) {
@@ -1591,7 +1698,7 @@ func (p *parser) parseTypeList() (list []ast.Expr) {
return
}
-func (p *parser) parseCaseClause(exprSwitch bool) *ast.CaseClause {
+func (p *parser) parseCaseClause(typeSwitch bool) *ast.CaseClause {
if p.trace {
defer un(trace(p, "CaseClause"))
}
@@ -1600,10 +1707,10 @@ func (p *parser) parseCaseClause(exprSwitch bool) *ast.CaseClause {
var list []ast.Expr
if p.tok == token.CASE {
p.next()
- if exprSwitch {
- list = p.parseRhsList()
- } else {
+ if typeSwitch {
list = p.parseTypeList()
+ } else {
+ list = p.parseRhsList()
}
} else {
p.expect(token.DEFAULT)
@@ -1614,18 +1721,22 @@ func (p *parser) parseCaseClause(exprSwitch bool) *ast.CaseClause {
body := p.parseStmtList()
p.closeScope()
- return &ast.CaseClause{pos, list, colon, body}
+ return &ast.CaseClause{Case: pos, List: list, Colon: colon, Body: body}
}
-func isExprSwitch(s ast.Stmt) bool {
- if s == nil {
- return true
- }
- if e, ok := s.(*ast.ExprStmt); ok {
- if a, ok := e.X.(*ast.TypeAssertExpr); ok {
- return a.Type != nil // regular type assertion
- }
- return true
+func isTypeSwitchAssert(x ast.Expr) bool {
+ a, ok := x.(*ast.TypeAssertExpr)
+ return ok && a.Type == nil
+}
+
+func isTypeSwitchGuard(s ast.Stmt) bool {
+ switch t := s.(type) {
+ case *ast.ExprStmt:
+ // x.(nil)
+ return isTypeSwitchAssert(t.X)
+ case *ast.AssignStmt:
+ // v := x.(nil)
+ return len(t.Lhs) == 1 && t.Tok == token.DEFINE && len(t.Rhs) == 1 && isTypeSwitchAssert(t.Rhs[0])
}
return false
}
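
Concretely, the two statement shapes the new predicate recognizes (illustration only, not from the patch; x is assumed to be an interface value in scope):

	switch x.(type) {        // ExprStmt: a type assertion with no type
	case int:
	}

	switch v := x.(type) {   // AssignStmt: single lhs, :=, single rhs of the same form
	case string:
		_ = v
	}

Everything else, for example switch f(x) { ... }, falls through to the expression-switch case below, the complement of what the old isExprSwitch computed.
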
@@ -1651,28 +1762,41 @@ func (p *parser) parseSwitchStmt() ast.Stmt {
s1 = s2
s2 = nil
if p.tok != token.LBRACE {
+ // A TypeSwitchGuard may declare a variable in addition
+ // to the variable declared in the initial SimpleStmt.
+ // Introduce extra scope to avoid redeclaration errors:
+ //
+ // switch t := 0; t := x.(T) { ... }
+ //
+ // (this code is not valid Go because the first t
+ // cannot be accessed and thus is never used; the extra
+ // scope is needed for the correct error message).
+ //
+ // If we don't have a type switch, s2 must be an expression.
+ // Having the extra nested but empty scope won't affect it.
+ p.openScope()
+ defer p.closeScope()
s2, _ = p.parseSimpleStmt(basic)
}
}
p.exprLev = prevLev
}
- exprSwitch := isExprSwitch(s2)
+ typeSwitch := isTypeSwitchGuard(s2)
lbrace := p.expect(token.LBRACE)
var list []ast.Stmt
for p.tok == token.CASE || p.tok == token.DEFAULT {
- list = append(list, p.parseCaseClause(exprSwitch))
+ list = append(list, p.parseCaseClause(typeSwitch))
}
rbrace := p.expect(token.RBRACE)
p.expectSemi()
- body := &ast.BlockStmt{lbrace, list, rbrace}
+ body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
- if exprSwitch {
- return &ast.SwitchStmt{pos, s1, p.makeExpr(s2), body}
+ if typeSwitch {
+ return &ast.TypeSwitchStmt{Switch: pos, Init: s1, Assign: s2, Body: body}
}
- // type switch
- // TODO(gri): do all the checks!
- return &ast.TypeSwitchStmt{pos, s1, s2, body}
+
+ return &ast.SwitchStmt{Switch: pos, Init: s1, Tag: p.makeExpr(s2), Body: body}
}
func (p *parser) parseCommClause() *ast.CommClause {
@@ -1695,34 +1819,31 @@ func (p *parser) parseCommClause() *ast.CommClause {
arrow := p.pos
p.next()
rhs := p.parseRhs()
- comm = &ast.SendStmt{lhs[0], arrow, rhs}
+ comm = &ast.SendStmt{Chan: lhs[0], Arrow: arrow, Value: rhs}
} else {
// RecvStmt
- pos := p.pos
- tok := p.tok
- var rhs ast.Expr
- if tok == token.ASSIGN || tok == token.DEFINE {
+ if tok := p.tok; tok == token.ASSIGN || tok == token.DEFINE {
// RecvStmt with assignment
if len(lhs) > 2 {
p.errorExpected(lhs[0].Pos(), "1 or 2 expressions")
// continue with first two expressions
lhs = lhs[0:2]
}
+ pos := p.pos
p.next()
- rhs = p.parseRhs()
+ rhs := p.parseRhs()
+ as := &ast.AssignStmt{Lhs: lhs, TokPos: pos, Tok: tok, Rhs: []ast.Expr{rhs}}
+ if tok == token.DEFINE {
+ p.shortVarDecl(as, lhs)
+ }
+ comm = as
} else {
- // rhs must be single receive operation
+ // lhs must be single receive operation
if len(lhs) > 1 {
p.errorExpected(lhs[0].Pos(), "1 expression")
// continue with first expression
}
- rhs = lhs[0]
- lhs = nil // there is no lhs
- }
- if lhs != nil {
- comm = &ast.AssignStmt{lhs, pos, tok, []ast.Expr{rhs}}
- } else {
- comm = &ast.ExprStmt{rhs}
+ comm = &ast.ExprStmt{X: lhs[0]}
}
}
} else {
@@ -1733,7 +1854,7 @@ func (p *parser) parseCommClause() *ast.CommClause {
body := p.parseStmtList()
p.closeScope()
- return &ast.CommClause{pos, comm, colon, body}
+ return &ast.CommClause{Case: pos, Comm: comm, Colon: colon, Body: body}
}
func (p *parser) parseSelectStmt() *ast.SelectStmt {
@@ -1749,9 +1870,9 @@ func (p *parser) parseSelectStmt() *ast.SelectStmt {
}
rbrace := p.expect(token.RBRACE)
p.expectSemi()
- body := &ast.BlockStmt{lbrace, list, rbrace}
+ body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
- return &ast.SelectStmt{pos, body}
+ return &ast.SelectStmt{Select: pos, Body: body}
}
func (p *parser) parseForStmt() ast.Stmt {
@@ -1800,16 +1921,30 @@ func (p *parser) parseForStmt() ast.Stmt {
key = as.Lhs[0]
default:
p.errorExpected(as.Lhs[0].Pos(), "1 or 2 expressions")
- return &ast.BadStmt{pos, body.End()}
+ return &ast.BadStmt{From: pos, To: body.End()}
}
// parseSimpleStmt returned a right-hand side that
// is a single unary expression of the form "range x"
x := as.Rhs[0].(*ast.UnaryExpr).X
- return &ast.RangeStmt{pos, key, value, as.TokPos, as.Tok, x, body}
+ return &ast.RangeStmt{
+ For: pos,
+ Key: key,
+ Value: value,
+ TokPos: as.TokPos,
+ Tok: as.Tok,
+ X: x,
+ Body: body,
+ }
}
// regular for statement
- return &ast.ForStmt{pos, s1, p.makeExpr(s2), s3, body}
+ return &ast.ForStmt{
+ For: pos,
+ Init: s1,
+ Cond: p.makeExpr(s2),
+ Post: s3,
+ Body: body,
+ }
}
func (p *parser) parseStmt() (s ast.Stmt) {
@@ -1819,12 +1954,12 @@ func (p *parser) parseStmt() (s ast.Stmt) {
switch p.tok {
case token.CONST, token.TYPE, token.VAR:
- s = &ast.DeclStmt{p.parseDecl()}
+ s = &ast.DeclStmt{Decl: p.parseDecl(syncStmt)}
case
- // tokens that may start a top-level expression
- token.IDENT, token.INT, token.FLOAT, token.CHAR, token.STRING, token.FUNC, token.LPAREN, // operand
- token.LBRACK, token.STRUCT, // composite type
- token.MUL, token.AND, token.ARROW, token.ADD, token.SUB, token.XOR: // unary operators
+ // tokens that may start an expression
+ token.IDENT, token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING, token.FUNC, token.LPAREN, // operands
+ token.LBRACK, token.STRUCT, // composite types
+ token.ADD, token.SUB, token.MUL, token.AND, token.XOR, token.ARROW, token.NOT: // unary operators
s, _ = p.parseSimpleStmt(labelOk)
// because of the required look-ahead, labeled statements are
// parsed by parseSimpleStmt - don't expect a semicolon after
@@ -1852,17 +1987,17 @@ func (p *parser) parseStmt() (s ast.Stmt) {
case token.FOR:
s = p.parseForStmt()
case token.SEMICOLON:
- s = &ast.EmptyStmt{p.pos}
+ s = &ast.EmptyStmt{Semicolon: p.pos}
p.next()
case token.RBRACE:
// a semicolon may be omitted before a closing "}"
- s = &ast.EmptyStmt{p.pos}
+ s = &ast.EmptyStmt{Semicolon: p.pos}
default:
// no statement found
pos := p.pos
p.errorExpected(pos, "statement")
- p.next() // make progress
- s = &ast.BadStmt{pos, p.pos}
+ syncStmt(p)
+ s = &ast.BadStmt{From: pos, To: p.pos}
}
return
@@ -1873,6 +2008,17 @@ func (p *parser) parseStmt() (s ast.Stmt) {
type parseSpecFunction func(p *parser, doc *ast.CommentGroup, iota int) ast.Spec
+func isValidImport(lit string) bool {
+ const illegalChars = `!"#$%&'()*,:;<=>?[\]^{|}` + "`\uFFFD"
+ s, _ := strconv.Unquote(lit) // go/scanner returns a legal string literal
+ for _, r := range s {
+ if !unicode.IsGraphic(r) || unicode.IsSpace(r) || strings.ContainsRune(illegalChars, r) {
+ return false
+ }
+ }
+ return s != ""
+}
+
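For reference (illustration, not part of the patch), how the check above classifies a few import paths; the argument is the quoted literal handed to parseImportSpec below:

	import "fmt"        // ok
	import "go/parser"  // ok: '/' is not in illegalChars
	import ""           // rejected: empty path
	import "foo bar"    // rejected: space (unicode.IsSpace)
	import "what?!"     // rejected: '?' and '!' appear in illegalChars
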
func parseImportSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
if p.trace {
defer un(trace(p, "ImportSpec"))
@@ -1881,7 +2027,7 @@ func parseImportSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
var ident *ast.Ident
switch p.tok {
case token.PERIOD:
- ident = &ast.Ident{p.pos, ".", nil}
+ ident = &ast.Ident{NamePos: p.pos, Name: "."}
p.next()
case token.IDENT:
ident = p.parseIdent()
@@ -1889,7 +2035,10 @@ func parseImportSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
var path *ast.BasicLit
if p.tok == token.STRING {
- path = &ast.BasicLit{p.pos, p.tok, p.lit}
+ if !isValidImport(p.lit) {
+ p.error(p.pos, "invalid import path: "+p.lit)
+ }
+ path = &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
p.next()
} else {
p.expect(token.STRING) // use expect() error handling
@@ -1897,7 +2046,12 @@ func parseImportSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
p.expectSemi() // call before accessing p.linecomment
// collect imports
- spec := &ast.ImportSpec{doc, ident, path, p.lineComment}
+ spec := &ast.ImportSpec{
+ Doc: doc,
+ Name: ident,
+ Path: path,
+ Comment: p.lineComment,
+ }
p.imports = append(p.imports, spec)
return spec
@@ -1921,7 +2075,13 @@ func parseConstSpec(p *parser, doc *ast.CommentGroup, iota int) ast.Spec {
// a function begins at the end of the ConstSpec or VarSpec and ends at
// the end of the innermost containing block.
// (Global identifiers are resolved in a separate phase after parsing.)
- spec := &ast.ValueSpec{doc, idents, typ, values, p.lineComment}
+ spec := &ast.ValueSpec{
+ Doc: doc,
+ Names: idents,
+ Type: typ,
+ Values: values,
+ Comment: p.lineComment,
+ }
p.declare(spec, iota, p.topScope, ast.Con, idents...)
return spec
@@ -1938,7 +2098,7 @@ func parseTypeSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
// at the identifier in the TypeSpec and ends at the end of the innermost
// containing block.
// (Global identifiers are resolved in a separate phase after parsing.)
- spec := &ast.TypeSpec{doc, ident, nil, nil}
+ spec := &ast.TypeSpec{Doc: doc, Name: ident}
p.declare(spec, nil, p.topScope, ast.Typ, ident)
spec.Type = p.parseType()
@@ -1966,7 +2126,13 @@ func parseVarSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
// a function begins at the end of the ConstSpec or VarSpec and ends at
// the end of the innermost containing block.
// (Global identifiers are resolved in a separate phase after parsing.)
- spec := &ast.ValueSpec{doc, idents, typ, values, p.lineComment}
+ spec := &ast.ValueSpec{
+ Doc: doc,
+ Names: idents,
+ Type: typ,
+ Values: values,
+ Comment: p.lineComment,
+ }
p.declare(spec, nil, p.topScope, ast.Var, idents...)
return spec
@@ -1993,7 +2159,14 @@ func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.Gen
list = append(list, f(p, nil, 0))
}
- return &ast.GenDecl{doc, pos, keyword, lparen, list, rparen}
+ return &ast.GenDecl{
+ Doc: doc,
+ TokPos: pos,
+ Tok: keyword,
+ Lparen: lparen,
+ Specs: list,
+ Rparen: rparen,
+ }
}
func (p *parser) parseReceiver(scope *ast.Scope) *ast.FieldList {
@@ -2001,14 +2174,12 @@ func (p *parser) parseReceiver(scope *ast.Scope) *ast.FieldList {
defer un(trace(p, "Receiver"))
}
- pos := p.pos
par := p.parseParameters(scope, false)
// must have exactly one receiver
if par.NumFields() != 1 {
- p.errorExpected(pos, "exactly one receiver")
- // TODO determine a better range for BadExpr below
- par.List = []*ast.Field{&ast.Field{Type: &ast.BadExpr{pos, pos}}}
+ p.errorExpected(par.Opening, "exactly one receiver")
+ par.List = []*ast.Field{{Type: &ast.BadExpr{From: par.Opening, To: par.Closing + 1}}}
return par
}
@@ -2016,8 +2187,13 @@ func (p *parser) parseReceiver(scope *ast.Scope) *ast.FieldList {
recv := par.List[0]
base := deref(recv.Type)
if _, isIdent := base.(*ast.Ident); !isIdent {
- p.errorExpected(base.Pos(), "(unqualified) identifier")
- par.List = []*ast.Field{&ast.Field{Type: &ast.BadExpr{recv.Pos(), recv.End()}}}
+ if _, isBad := base.(*ast.BadExpr); !isBad {
+ // only report error if it's a new one
+ p.errorExpected(base.Pos(), "(unqualified) identifier")
+ }
+ par.List = []*ast.Field{
+ {Type: &ast.BadExpr{From: recv.Pos(), To: recv.End()}},
+ }
}
return par
@@ -2047,7 +2223,17 @@ func (p *parser) parseFuncDecl() *ast.FuncDecl {
}
p.expectSemi()
- decl := &ast.FuncDecl{doc, recv, ident, &ast.FuncType{pos, params, results}, body}
+ decl := &ast.FuncDecl{
+ Doc: doc,
+ Recv: recv,
+ Name: ident,
+ Type: &ast.FuncType{
+ Func: pos,
+ Params: params,
+ Results: results,
+ },
+ Body: body,
+ }
if recv == nil {
// Go spec: The scope of an identifier denoting a constant, type,
// variable, or function (but not method) declared at top level
@@ -2063,7 +2249,7 @@ func (p *parser) parseFuncDecl() *ast.FuncDecl {
return decl
}
-func (p *parser) parseDecl() ast.Decl {
+func (p *parser) parseDecl(sync func(*parser)) ast.Decl {
if p.trace {
defer un(trace(p, "Declaration"))
}
@@ -2085,26 +2271,13 @@ func (p *parser) parseDecl() ast.Decl {
default:
pos := p.pos
p.errorExpected(pos, "declaration")
- p.next() // make progress
- decl := &ast.BadDecl{pos, p.pos}
- return decl
+ sync(p)
+ return &ast.BadDecl{From: pos, To: p.pos}
}
return p.parseGenDecl(p.tok, f)
}
-func (p *parser) parseDeclList() (list []ast.Decl) {
- if p.trace {
- defer un(trace(p, "DeclList"))
- }
-
- for p.tok != token.EOF {
- list = append(list, p.parseDecl())
- }
-
- return
-}
-
// ----------------------------------------------------------------------------
// Source files
@@ -2129,7 +2302,7 @@ func (p *parser) parseFile() *ast.File {
// Don't bother parsing the rest if we had errors already.
// Likely not a Go source file at all.
- if p.ErrorCount() == 0 && p.mode&PackageClauseOnly == 0 {
+ if p.errors.Len() == 0 && p.mode&PackageClauseOnly == 0 {
// import decls
for p.tok == token.IMPORT {
decls = append(decls, p.parseGenDecl(token.IMPORT, parseImportSpec))
@@ -2138,7 +2311,7 @@ func (p *parser) parseFile() *ast.File {
if p.mode&ImportsOnly == 0 {
// rest of package body
for p.tok != token.EOF {
- decls = append(decls, p.parseDecl())
+ decls = append(decls, p.parseDecl(syncDecl))
}
}
}
@@ -2157,5 +2330,14 @@ func (p *parser) parseFile() *ast.File {
}
}
- return &ast.File{doc, pos, ident, decls, p.pkgScope, p.imports, p.unresolved[0:i], p.comments}
+ return &ast.File{
+ Doc: doc,
+ Package: pos,
+ Name: ident,
+ Decls: decls,
+ Scope: p.pkgScope,
+ Imports: p.imports,
+ Unresolved: p.unresolved[0:i],
+ Comments: p.comments,
+ }
}
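Note on the hunks above: parseDecl now takes a synchronization callback (parseFile passes syncDecl) and wraps the skipped tokens in an ast.BadDecl, instead of consuming a single token and giving up on the declaration. The actual syncDecl is defined elsewhere in parser.go and is not part of this excerpt; the standalone sketch below only illustrates the idea of skipping ahead to a token that can start a declaration. The helper name syncToDecl and the token-slice representation are assumptions made for the example.

	package main

	import (
		"fmt"
		"go/token"
	)

	// syncToDecl is a hypothetical stand-in for the parser's internal
	// synchronization: advance through a token stream until a token that
	// may begin a top-level declaration (or EOF) is reached, and report
	// the index at which parsing could resume.
	func syncToDecl(toks []token.Token, i int) int {
		for ; i < len(toks); i++ {
			switch toks[i] {
			case token.CONST, token.TYPE, token.VAR, token.FUNC, token.EOF:
				return i
			}
		}
		return len(toks)
	}

	func main() {
		toks := []token.Token{token.IDENT, token.ILLEGAL, token.VAR, token.IDENT}
		fmt.Println(syncToDecl(toks, 0)) // prints 2: parsing resumes at 'var'
	}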
diff --git a/src/pkg/go/parser/parser_test.go b/src/pkg/go/parser/parser_test.go
index 39a78e515..5e45acd00 100644
--- a/src/pkg/go/parser/parser_test.go
+++ b/src/pkg/go/parser/parser_test.go
@@ -5,6 +5,8 @@
package parser
import (
+ "fmt"
+ "go/ast"
"go/token"
"os"
"testing"
@@ -12,81 +14,14 @@ import (
var fset = token.NewFileSet()
-var illegalInputs = []interface{}{
- nil,
- 3.14,
- []byte(nil),
- "foo!",
- `package p; func f() { if /* should have condition */ {} };`,
- `package p; func f() { if ; /* should have condition */ {} };`,
- `package p; func f() { if f(); /* should have condition */ {} };`,
- `package p; const c; /* should have constant value */`,
- `package p; func f() { if _ = range x; true {} };`,
- `package p; func f() { switch _ = range x; true {} };`,
- `package p; func f() { for _ = range x ; ; {} };`,
- `package p; func f() { for ; ; _ = range x {} };`,
- `package p; func f() { for ; _ = range x ; {} };`,
- `package p; var a = [1]int; /* illegal expression */`,
- `package p; var a = [...]int; /* illegal expression */`,
- `package p; var a = struct{} /* illegal expression */`,
- `package p; var a = func(); /* illegal expression */`,
- `package p; var a = interface{} /* illegal expression */`,
- `package p; var a = []int /* illegal expression */`,
- `package p; var a = map[int]int /* illegal expression */`,
- `package p; var a = chan int; /* illegal expression */`,
- `package p; var a = []int{[]int}; /* illegal expression */`,
- `package p; var a = ([]int); /* illegal expression */`,
- `package p; var a = a[[]int:[]int]; /* illegal expression */`,
- `package p; var a = <- chan int; /* illegal expression */`,
- `package p; func f() { select { case _ <- chan int: } };`,
-}
-
-func TestParseIllegalInputs(t *testing.T) {
- for _, src := range illegalInputs {
- _, err := ParseFile(fset, "", src, 0)
- if err == nil {
- t.Errorf("ParseFile(%v) should have failed", src)
- }
- }
-}
-
-var validPrograms = []interface{}{
- "package p\n",
- `package p;`,
- `package p; import "fmt"; func f() { fmt.Println("Hello, World!") };`,
- `package p; func f() { if f(T{}) {} };`,
- `package p; func f() { _ = (<-chan int)(x) };`,
- `package p; func f() { _ = (<-chan <-chan int)(x) };`,
- `package p; func f(func() func() func());`,
- `package p; func f(...T);`,
- `package p; func f(float, ...int);`,
- `package p; func f(x int, a ...int) { f(0, a...); f(1, a...,) };`,
- `package p; type T []int; var a []bool; func f() { if a[T{42}[0]] {} };`,
- `package p; type T []int; func g(int) bool { return true }; func f() { if g(T{42}[0]) {} };`,
- `package p; type T []int; func f() { for _ = range []int{T{42}[0]} {} };`,
- `package p; var a = T{{1, 2}, {3, 4}}`,
- `package p; func f() { select { case <- c: case c <- d: case c <- <- d: case <-c <- d: } };`,
- `package p; func f() { select { case x := (<-c): } };`,
- `package p; func f() { if ; true {} };`,
- `package p; func f() { switch ; {} };`,
- `package p; func f() { for _ = range "foo" + "bar" {} };`,
-}
-
-func TestParseValidPrograms(t *testing.T) {
- for _, src := range validPrograms {
- _, err := ParseFile(fset, "", src, 0)
- if err != nil {
- t.Errorf("ParseFile(%q): %v", src, err)
- }
- }
-}
-
var validFiles = []string{
"parser.go",
"parser_test.go",
+ "error_test.go",
+ "short_test.go",
}
-func TestParse3(t *testing.T) {
+func TestParse(t *testing.T) {
for _, filename := range validFiles {
_, err := ParseFile(fset, filename, nil, DeclarationErrors)
if err != nil {
@@ -106,9 +41,9 @@ func nameFilter(filename string) bool {
return true
}
-func dirFilter(f *os.FileInfo) bool { return nameFilter(f.Name) }
+func dirFilter(f os.FileInfo) bool { return nameFilter(f.Name()) }
-func TestParse4(t *testing.T) {
+func TestParseDir(t *testing.T) {
path := "."
pkgs, err := ParseDir(fset, path, dirFilter, 0)
if err != nil {
@@ -128,3 +63,117 @@ func TestParse4(t *testing.T) {
}
}
}
+
+func TestParseExpr(t *testing.T) {
+ // just kicking the tires:
+ // a valid expression
+ src := "a + b"
+ x, err := ParseExpr(src)
+ if err != nil {
+ t.Errorf("ParseExpr(%s): %v", src, err)
+ }
+ // sanity check
+ if _, ok := x.(*ast.BinaryExpr); !ok {
+ t.Errorf("ParseExpr(%s): got %T, expected *ast.BinaryExpr", src, x)
+ }
+
+ // an invalid expression
+ src = "a + *"
+ _, err = ParseExpr(src)
+ if err == nil {
+ t.Errorf("ParseExpr(%s): %v", src, err)
+ }
+
+ // it must not crash
+ for _, src := range valids {
+ ParseExpr(src)
+ }
+}
+
+func TestColonEqualsScope(t *testing.T) {
+ f, err := ParseFile(fset, "", `package p; func f() { x, y, z := x, y, z }`, 0)
+ if err != nil {
+ t.Errorf("parse: %s", err)
+ }
+
+ // RHS refers to undefined globals; LHS does not.
+ as := f.Decls[0].(*ast.FuncDecl).Body.List[0].(*ast.AssignStmt)
+ for _, v := range as.Rhs {
+ id := v.(*ast.Ident)
+ if id.Obj != nil {
+ t.Errorf("rhs %s has Obj, should not", id.Name)
+ }
+ }
+ for _, v := range as.Lhs {
+ id := v.(*ast.Ident)
+ if id.Obj == nil {
+ t.Errorf("lhs %s does not have Obj, should", id.Name)
+ }
+ }
+}
+
+func TestVarScope(t *testing.T) {
+ f, err := ParseFile(fset, "", `package p; func f() { var x, y, z = x, y, z }`, 0)
+ if err != nil {
+ t.Errorf("parse: %s", err)
+ }
+
+ // RHS refers to undefined globals; LHS does not.
+ as := f.Decls[0].(*ast.FuncDecl).Body.List[0].(*ast.DeclStmt).Decl.(*ast.GenDecl).Specs[0].(*ast.ValueSpec)
+ for _, v := range as.Values {
+ id := v.(*ast.Ident)
+ if id.Obj != nil {
+ t.Errorf("rhs %s has Obj, should not", id.Name)
+ }
+ }
+ for _, id := range as.Names {
+ if id.Obj == nil {
+ t.Errorf("lhs %s does not have Obj, should", id.Name)
+ }
+ }
+}
+
+var imports = map[string]bool{
+ `"a"`: true,
+ "`a`": true,
+ `"a/b"`: true,
+ `"a.b"`: true,
+ `"m\x61th"`: true,
+ `"greek/αβ"`: true,
+ `""`: false,
+
+ // Each of these pairs tests both `` vs "" strings
+ // and also the use of invalid characters spelled out as
+ // escape sequences and written directly.
+ // For example `"\x00"` tests import "\x00"
+ // while "`\x00`" tests import `<actual-NUL-byte>`.
+ `"\x00"`: false,
+ "`\x00`": false,
+ `"\x7f"`: false,
+ "`\x7f`": false,
+ `"a!"`: false,
+ "`a!`": false,
+ `"a b"`: false,
+ "`a b`": false,
+ `"a\\b"`: false,
+ "`a\\b`": false,
+ "\"`a`\"": false,
+ "`\"a\"`": false,
+ `"\x80\x80"`: false,
+ "`\x80\x80`": false,
+ `"\xFFFD"`: false,
+ "`\xFFFD`": false,
+}
+
+func TestImports(t *testing.T) {
+ for path, isValid := range imports {
+ src := fmt.Sprintf("package p; import %s", path)
+ _, err := ParseFile(fset, "", src, 0)
+ switch {
+ case err != nil && isValid:
+ t.Errorf("ParseFile(%s): got %v; expected no error", src, err)
+ case err == nil && !isValid:
+ t.Errorf("ParseFile(%s): got no error; expected one", src)
+ }
+ }
+}
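TestParseExpr above goes through the exported ParseExpr entry point. For reference, a minimal standalone usage sketch of that API, using nothing beyond the documented go/parser and go/ast packages:

	package main

	import (
		"fmt"
		"go/ast"
		"go/parser"
	)

	func main() {
		// Parse a single expression, as TestParseExpr does for "a + b".
		x, err := parser.ParseExpr("a + b")
		if err != nil {
			fmt.Println("parse error:", err)
			return
		}
		// The result is an ast.Expr; a simple addition comes back as *ast.BinaryExpr.
		if be, ok := x.(*ast.BinaryExpr); ok {
			fmt.Println("binary expression with operator", be.Op)
		}
	}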
diff --git a/src/pkg/go/parser/short_test.go b/src/pkg/go/parser/short_test.go
new file mode 100644
index 000000000..238492bf3
--- /dev/null
+++ b/src/pkg/go/parser/short_test.go
@@ -0,0 +1,75 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains test cases for short valid and invalid programs.
+
+package parser
+
+import "testing"
+
+var valids = []string{
+ "package p\n",
+ `package p;`,
+ `package p; import "fmt"; func f() { fmt.Println("Hello, World!") };`,
+ `package p; func f() { if f(T{}) {} };`,
+ `package p; func f() { _ = (<-chan int)(x) };`,
+ `package p; func f() { _ = (<-chan <-chan int)(x) };`,
+ `package p; func f(func() func() func());`,
+ `package p; func f(...T);`,
+ `package p; func f(float, ...int);`,
+ `package p; func f(x int, a ...int) { f(0, a...); f(1, a...,) };`,
+ `package p; func f(int,) {};`,
+ `package p; func f(...int,) {};`,
+ `package p; func f(x ...int,) {};`,
+ `package p; type T []int; var a []bool; func f() { if a[T{42}[0]] {} };`,
+ `package p; type T []int; func g(int) bool { return true }; func f() { if g(T{42}[0]) {} };`,
+ `package p; type T []int; func f() { for _ = range []int{T{42}[0]} {} };`,
+ `package p; var a = T{{1, 2}, {3, 4}}`,
+ `package p; func f() { select { case <- c: case c <- d: case c <- <- d: case <-c <- d: } };`,
+ `package p; func f() { select { case x := (<-c): } };`,
+ `package p; func f() { if ; true {} };`,
+ `package p; func f() { switch ; {} };`,
+ `package p; func f() { for _ = range "foo" + "bar" {} };`,
+}
+
+func TestValid(t *testing.T) {
+ for _, src := range valids {
+ checkErrors(t, src, src)
+ }
+}
+
+var invalids = []string{
+ `foo /* ERROR "expected 'package'" */ !`,
+ `package p; func f() { if { /* ERROR "expected operand" */ } };`,
+ `package p; func f() { if ; { /* ERROR "expected operand" */ } };`,
+ `package p; func f() { if f(); { /* ERROR "expected operand" */ } };`,
+ `package p; const c; /* ERROR "expected '='" */`,
+ `package p; func f() { if _ /* ERROR "expected condition" */ = range x; true {} };`,
+ `package p; func f() { switch _ /* ERROR "expected condition" */ = range x; true {} };`,
+ `package p; func f() { for _ = range x ; /* ERROR "expected '{'" */ ; {} };`,
+ `package p; func f() { for ; ; _ = range /* ERROR "expected operand" */ x {} };`,
+ `package p; func f() { for ; _ /* ERROR "expected condition" */ = range x ; {} };`,
+ `package p; func f() { switch t /* ERROR "expected condition" */ = t.(type) {} };`,
+ `package p; func f() { switch t /* ERROR "expected condition" */ , t = t.(type) {} };`,
+ `package p; func f() { switch t /* ERROR "expected condition" */ = t.(type), t {} };`,
+ `package p; var a = [ /* ERROR "expected expression" */ 1]int;`,
+ `package p; var a = [ /* ERROR "expected expression" */ ...]int;`,
+ `package p; var a = struct /* ERROR "expected expression" */ {}`,
+ `package p; var a = func /* ERROR "expected expression" */ ();`,
+ `package p; var a = interface /* ERROR "expected expression" */ {}`,
+ `package p; var a = [ /* ERROR "expected expression" */ ]int`,
+ `package p; var a = map /* ERROR "expected expression" */ [int]int`,
+ `package p; var a = chan /* ERROR "expected expression" */ int;`,
+ `package p; var a = []int{[ /* ERROR "expected expression" */ ]int};`,
+ `package p; var a = ( /* ERROR "expected expression" */ []int);`,
+ `package p; var a = a[[ /* ERROR "expected expression" */ ]int:[]int];`,
+ `package p; var a = <- /* ERROR "expected expression" */ chan int;`,
+ `package p; func f() { select { case _ <- chan /* ERROR "expected expression" */ int: } };`,
+}
+
+func TestInvalid(t *testing.T) {
+ for _, src := range invalids {
+ checkErrors(t, src, src)
+ }
+}
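The invalid programs above carry their expected messages next to the offending token. Outside the test harness, the same parse errors surface through the public API as a scanner.ErrorList. A small sketch, assuming the default parse mode; the exact message text is intentionally not asserted here:

	package main

	import (
		"fmt"
		"go/parser"
		"go/scanner"
		"go/token"
	)

	func main() {
		src := `package p; const c;` // one of the invalid programs above
		fset := token.NewFileSet()
		_, err := parser.ParseFile(fset, "invalid.go", src, 0)
		// On syntax errors ParseFile returns a scanner.ErrorList; each entry
		// carries a position and a message.
		if list, ok := err.(scanner.ErrorList); ok {
			for _, e := range list {
				fmt.Println(e.Pos, e.Msg)
			}
		} else if err != nil {
			fmt.Println(err)
		}
	}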
diff --git a/src/pkg/go/parser/testdata/commas.src b/src/pkg/go/parser/testdata/commas.src
new file mode 100644
index 000000000..af6e70645
--- /dev/null
+++ b/src/pkg/go/parser/testdata/commas.src
@@ -0,0 +1,19 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Test case for error messages/parser synchronization
+// after missing commas.
+
+package p
+
+var _ = []int{
+ 0 /* ERROR "missing ','" */
+}
+
+var _ = []int{
+ 0,
+ 1,
+ 2,
+ 3 /* ERROR "missing ','" */
+}
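The commas.src case checks that a missing comma yields one localized error while parsing continues, so the surrounding declaration still produces a usable AST. A hedged sketch of that behavior follows; the exact AST shape after recovery is an assumption, so the example guards each step rather than asserting it:

	package main

	import (
		"fmt"
		"go/ast"
		"go/parser"
		"go/token"
	)

	const src = `package p

	var _ = []int{
		0
	}
	`

	func main() {
		fset := token.NewFileSet()
		f, err := parser.ParseFile(fset, "commas.go", src, 0)
		fmt.Println("err:", err) // expect a "missing ','" style message
		if f == nil || len(f.Decls) == 0 {
			return
		}
		// Even with the error, the var declaration should still be present.
		if gd, ok := f.Decls[0].(*ast.GenDecl); ok {
			if vs, ok := gd.Specs[0].(*ast.ValueSpec); ok {
				if lit, ok := vs.Values[0].(*ast.CompositeLit); ok {
					fmt.Println("elements parsed:", len(lit.Elts))
				}
			}
		}
	}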
diff --git a/src/pkg/go/parser/testdata/issue3106.src b/src/pkg/go/parser/testdata/issue3106.src
new file mode 100644
index 000000000..82796c8ce
--- /dev/null
+++ b/src/pkg/go/parser/testdata/issue3106.src
@@ -0,0 +1,46 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Test case for issue 3106: Better synchronization of
+// parser after certain syntax errors.
+
+package main
+
+func f() {
+ var m Mutex
+ c := MakeCond(&m)
+ percent := 0
+ const step = 10
+ for i := 0; i < 5; i++ {
+ go func() {
+ for {
+ // Emulates some useful work.
+ time.Sleep(1e8)
+ m.Lock()
+ defer
+ if /* ERROR "expected operand, found 'if'" */ percent == 100 {
+ m.Unlock()
+ break
+ }
+ percent++
+ if percent % step == 0 {
+ //c.Signal()
+ }
+ m.Unlock()
+ }
+ }()
+ }
+ for {
+ m.Lock()
+ if percent == 0 || percent % step != 0 {
+ c.Wait()
+ }
+ fmt.Print(",")
+ if percent == 100 {
+ m.Unlock()
+ break
+ }
+ m.Unlock()
+ }
+}