Diffstat (limited to 'src/pkg/go/parser')
-rw-r--r--	src/pkg/go/parser/error_test.go        |  10
-rw-r--r--	src/pkg/go/parser/interface.go         |  48
-rw-r--r--	src/pkg/go/parser/parser.go            | 249
-rw-r--r--	src/pkg/go/parser/parser_test.go       |  47
-rw-r--r--	src/pkg/go/parser/performance_test.go  |  30
-rw-r--r--	src/pkg/go/parser/short_test.go        |  13
6 files changed, 279 insertions(+), 118 deletions(-)
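The ParseFile change below replaces the old per-line filtering of spurious errors with a hard cap: unless the new AllErrors mode is set (SpuriousErrors is kept as a backward-compatible alias), at most one error per source line is recorded and parsing bails out once more than ten errors have accumulated. A minimal, self-contained sketch of how a caller can observe the two modes; the deliberately broken source string is invented for illustration and is not part of this change:

package main

import (
	"fmt"
	"go/parser"
	"go/scanner"
	"go/token"
	"strings"
)

func main() {
	// Twenty statements that each trigger "expected identifier on left side of :=".
	src := "package p\nfunc f() {\n" + strings.Repeat("\t1 := 2\n", 20) + "}\n"

	// Default mode: errors on the same line as the previous error are dropped
	// and parsing stops after roughly ten errors.
	_, err := parser.ParseFile(token.NewFileSet(), "broken.go", src, 0)
	if list, ok := err.(scanner.ErrorList); ok {
		fmt.Println("default mode:", len(list), "errors reported")
	}

	// AllErrors: every recorded error is returned.
	_, err = parser.ParseFile(token.NewFileSet(), "broken.go", src, parser.AllErrors)
	if list, ok := err.(scanner.ErrorList); ok {
		fmt.Println("AllErrors:   ", len(list), "errors reported")
	}
}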
diff --git a/src/pkg/go/parser/error_test.go b/src/pkg/go/parser/error_test.go
index 377c8b80c..b59fda11a 100644
--- a/src/pkg/go/parser/error_test.go
+++ b/src/pkg/go/parser/error_test.go
@@ -34,9 +34,11 @@ import (
 
 const testdata = "testdata"
 
+var fsetErrs *token.FileSet
+
 // getFile assumes that each filename occurs at most once
 func getFile(filename string) (file *token.File) {
-	fset.Iterate(func(f *token.File) bool {
+	fsetErrs.Iterate(func(f *token.File) bool {
 		if f.Name() == filename {
 			if file != nil {
 				panic(filename + " used multiple times")
@@ -125,7 +127,7 @@ func compareErrors(t *testing.T, expected map[token.Pos]string, found scanner.Er
 	if len(expected) > 0 {
 		t.Errorf("%d errors not reported:", len(expected))
 		for pos, msg := range expected {
-			t.Errorf("%s: %s\n", fset.Position(pos), msg)
+			t.Errorf("%s: %s\n", fsetErrs.Position(pos), msg)
 		}
 	}
 }
@@ -137,12 +139,13 @@ func checkErrors(t *testing.T, filename string, input interface{}) {
 		return
 	}
 
-	_, err = ParseFile(fset, filename, src, DeclarationErrors)
+	_, err = ParseFile(fsetErrs, filename, src, DeclarationErrors|AllErrors)
 	found, ok := err.(scanner.ErrorList)
 	if err != nil && !ok {
 		t.Error(err)
 		return
 	}
+	found.RemoveMultiples()
 
 	// we are expecting the following errors
 	// (collect these after parsing a file so that it is found in the file set)
@@ -153,6 +156,7 @@ func checkErrors(t *testing.T, filename string, input interface{}) {
 }
 
 func TestErrors(t *testing.T) {
+	fsetErrs = token.NewFileSet()
 	list, err := ioutil.ReadDir(testdata)
 	if err != nil {
 		t.Fatal(err)
diff --git a/src/pkg/go/parser/interface.go b/src/pkg/go/parser/interface.go
index 5c203a784..39affdd6b 100644
--- a/src/pkg/go/parser/interface.go
+++ b/src/pkg/go/parser/interface.go
@@ -52,12 +52,13 @@ func readSource(filename string, src interface{}) ([]byte, error) {
 type Mode uint
 
 const (
-	PackageClauseOnly Mode = 1 << iota // parsing stops after package clause
-	ImportsOnly                        // parsing stops after import declarations
-	ParseComments                      // parse comments and add them to AST
-	Trace                              // print a trace of parsed productions
-	DeclarationErrors                  // report declaration errors
-	SpuriousErrors                     // report all (not just the first) errors per line
+	PackageClauseOnly Mode             = 1 << iota // stop parsing after package clause
+	ImportsOnly                                    // stop parsing after import declarations
+	ParseComments                                  // parse comments and add them to AST
+	Trace                                          // print a trace of parsed productions
+	DeclarationErrors                              // report declaration errors
+	SpuriousErrors                                 // same as AllErrors, for backward-compatibility
+	AllErrors         = SpuriousErrors             // report all errors (not just the first 10 on different lines)
 )
 
 // ParseFile parses the source code of a single Go source file and returns
@@ -79,26 +80,39 @@ const (
 // representing the fragments of erroneous source code). Multiple errors
 // are returned via a scanner.ErrorList which is sorted by file position.
 //
-func ParseFile(fset *token.FileSet, filename string, src interface{}, mode Mode) (*ast.File, error) {
+func ParseFile(fset *token.FileSet, filename string, src interface{}, mode Mode) (f *ast.File, err error) {
 	// get source
 	text, err := readSource(filename, src)
 	if err != nil {
 		return nil, err
 	}
 
-	// parse source
 	var p parser
-	p.init(fset, filename, text, mode)
-	f := p.parseFile()
+	defer func() {
+		if e := recover(); e != nil {
+			_ = e.(bailout) // re-panics if it's not a bailout
+		}
+
+		// set result values
+		if f == nil {
+			// source is not a valid Go source file - satisfy
+			// ParseFile API and return a valid (but) empty
+			// *ast.File
+			f = &ast.File{
+				Name:  new(ast.Ident),
+				Scope: ast.NewScope(nil),
+			}
+		}
 
-	// sort errors
-	if p.mode&SpuriousErrors == 0 {
-		p.errors.RemoveMultiples()
-	} else {
 		p.errors.Sort()
-	}
+		err = p.errors.Err()
+	}()
+
+	// parse source
+	p.init(fset, filename, text, mode)
+	f = p.parseFile()
 
-	return f, p.errors.Err()
+	return
 }
 
 // ParseDir calls ParseFile for the files in the directory specified by path and
@@ -149,7 +163,7 @@ func ParseDir(fset *token.FileSet, path string, filter func(os.FileInfo) bool, m
 
 // ParseExpr is a convenience function for obtaining the AST of an expression x.
 // The position information recorded in the AST is undefined.
-// 
+//
 func ParseExpr(x string) (ast.Expr, error) {
 	// parse x within the context of a complete package for correct scopes;
 	// use //line directive for correct positions in error messages and put
diff --git a/src/pkg/go/parser/parser.go b/src/pkg/go/parser/parser.go
index 20e505d97..a021a5abe 100644
--- a/src/pkg/go/parser/parser.go
+++ b/src/pkg/go/parser/parser.go
@@ -28,7 +28,7 @@ type parser struct {
 	// Tracing/debugging
 	mode   Mode // parsing mode
 	trace  bool // == (mode & Trace != 0)
-	indent uint // indentation used for tracing output
+	indent int  // indentation used for tracing output
 
 	// Comments
 	comments    []*ast.CommentGroup
@@ -56,7 +56,7 @@ type parser struct {
 	unresolved []*ast.Ident      // unresolved identifiers
 	imports    []*ast.ImportSpec // list of imports
 
-	// Label scope
+	// Label scopes
 	// (maintained by open/close LabelScope)
 	labelScope  *ast.Scope     // label scope for current function
 	targetStack [][]*ast.Ident // stack of unresolved labels
@@ -75,14 +75,6 @@ func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode Mod
 	p.trace = mode&Trace != 0 // for convenience (p.trace is used frequently)
 
 	p.next()
-
-	// set up the pkgScope here (as opposed to in parseFile) because
-	// there are other parser entry points (ParseExpr, etc.)
-	p.openScope()
-	p.pkgScope = p.topScope
-
-	// for the same reason, set up a label scope
-	p.openLabelScope()
 }
 
 // ----------------------------------------------------------------------------
@@ -157,7 +149,7 @@ func (p *parser) shortVarDecl(decl *ast.AssignStmt, list []ast.Expr) {
 				}
 			}
 		} else {
-			p.errorExpected(x.Pos(), "identifier")
+			p.errorExpected(x.Pos(), "identifier on left side of :=")
 		}
 	}
 	if n == 0 && p.mode&DeclarationErrors != 0 {
@@ -170,7 +162,12 @@ func (p *parser) shortVarDecl(decl *ast.AssignStmt, list []ast.Expr) {
 // internal consistency.
 var unresolved = new(ast.Object)
 
-func (p *parser) resolve(x ast.Expr) {
+// If x is an identifier, tryResolve attempts to resolve x by looking up
+// the object it denotes. If no object is found and collectUnresolved is
+// set, x is marked as unresolved and collected in the list of unresolved
+// identifiers.
+//
+func (p *parser) tryResolve(x ast.Expr, collectUnresolved bool) {
 	// nothing to do if x is not an identifier or the blank identifier
 	ident, _ := x.(*ast.Ident)
 	if ident == nil {
@@ -191,23 +188,30 @@ func (p *parser) resolve(x ast.Expr) {
 	// must be found either in the file scope, package scope
 	// (perhaps in another file), or universe scope --- collect
 	// them so that they can be resolved later
-	ident.Obj = unresolved
-	p.unresolved = append(p.unresolved, ident)
+	if collectUnresolved {
+		ident.Obj = unresolved
+		p.unresolved = append(p.unresolved, ident)
+	}
+}
+
+func (p *parser) resolve(x ast.Expr) {
+	p.tryResolve(x, true)
 }
 
 // ----------------------------------------------------------------------------
 // Parsing support
 
 func (p *parser) printTrace(a ...interface{}) {
-	const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . " +
-		". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
-	const n = uint(len(dots))
+	const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
+	const n = len(dots)
 	pos := p.file.Position(p.pos)
 	fmt.Printf("%5d:%3d: ", pos.Line, pos.Column)
 	i := 2 * p.indent
-	for ; i > n; i -= n {
+	for i > n {
 		fmt.Print(dots)
+		i -= n
 	}
+	// i <= n
 	fmt.Print(dots[0:i])
 	fmt.Println(a...)
 }
@@ -218,7 +222,7 @@ func trace(p *parser, msg string) *parser {
 	return p
 }
 
-// Usage pattern: defer un(trace(p, "..."));
+// Usage pattern: defer un(trace(p, "..."))
 func un(p *parser) {
 	p.indent--
 	p.printTrace(")")
@@ -304,14 +308,14 @@ func (p *parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline
 func (p *parser) next() {
 	p.leadComment = nil
 	p.lineComment = nil
-	line := p.file.Line(p.pos) // current line
+	prev := p.pos
 	p.next0()
 
 	if p.tok == token.COMMENT {
 		var comment *ast.CommentGroup
 		var endline int
 
-		if p.file.Line(p.pos) == line {
+		if p.file.Line(p.pos) == p.file.Line(prev) {
 			// The comment is on same line as the previous token; it
 			// cannot be a lead comment but may be a line comment.
 			comment, endline = p.consumeCommentGroup(0)
@@ -336,8 +340,26 @@ func (p *parser) next() {
 	}
 }
 
+// A bailout panic is raised to indicate early termination.
+type bailout struct{}
+
 func (p *parser) error(pos token.Pos, msg string) {
-	p.errors.Add(p.file.Position(pos), msg)
+	epos := p.file.Position(pos)
+
+	// If AllErrors is not set, discard errors reported on the same line
+	// as the last recorded error and stop parsing if there are more than
+	// 10 errors.
+	if p.mode&AllErrors == 0 {
+		n := len(p.errors)
+		if n > 0 && p.errors[n-1].Pos.Line == epos.Line {
+			return // discard - likely a spurious error
+		}
+		if n > 10 {
+			panic(bailout{})
+		}
+	}
+
+	p.errors.Add(epos, msg)
 }
 
 func (p *parser) errorExpected(pos token.Pos, msg string) {
@@ -585,14 +607,15 @@ func (p *parser) parseTypeName() ast.Expr {
 	return ident
 }
 
-func (p *parser) parseArrayType(ellipsisOk bool) ast.Expr {
+func (p *parser) parseArrayType() ast.Expr {
 	if p.trace {
 		defer un(trace(p, "ArrayType"))
 	}
 
 	lbrack := p.expect(token.LBRACK)
 	var len ast.Expr
-	if ellipsisOk && p.tok == token.ELLIPSIS {
+	// always permit ellipsis for more fault-tolerant parsing
+	if p.tok == token.ELLIPSIS {
 		len = &ast.Ellipsis{Ellipsis: p.pos}
 		p.next()
 	} else if p.tok != token.RBRACK {
@@ -704,7 +727,7 @@ func (p *parser) tryVarType(isParam bool) ast.Expr {
 	if isParam && p.tok == token.ELLIPSIS {
 		pos := p.pos
 		p.next()
-		typ := p.tryIdentOrType(isParam) // don't use parseType so we can provide better error message
+		typ := p.tryIdentOrType() // don't use parseType so we can provide better error message
 		if typ != nil {
 			p.resolve(typ)
 		} else {
@@ -713,7 +736,7 @@ func (p *parser) tryVarType(isParam bool) ast.Expr {
 		}
 		return &ast.Ellipsis{Ellipsis: pos, Elt: typ}
 	}
-	return p.tryIdentOrType(false)
+	return p.tryIdentOrType()
 }
 
 // If the result is an identifier, it is not resolved.
@@ -931,29 +954,31 @@ func (p *parser) parseChanType() *ast.ChanType {
 
 	pos := p.pos
 	dir := ast.SEND | ast.RECV
+	var arrow token.Pos
 	if p.tok == token.CHAN {
 		p.next()
 		if p.tok == token.ARROW {
+			arrow = p.pos
 			p.next()
 			dir = ast.SEND
 		}
 	} else {
-		p.expect(token.ARROW)
+		arrow = p.expect(token.ARROW)
 		p.expect(token.CHAN)
 		dir = ast.RECV
 	}
 	value := p.parseType()
 
-	return &ast.ChanType{Begin: pos, Dir: dir, Value: value}
+	return &ast.ChanType{Begin: pos, Arrow: arrow, Dir: dir, Value: value}
 }
 
 // If the result is an identifier, it is not resolved.
-func (p *parser) tryIdentOrType(ellipsisOk bool) ast.Expr {
+func (p *parser) tryIdentOrType() ast.Expr {
 	switch p.tok {
 	case token.IDENT:
 		return p.parseTypeName()
 	case token.LBRACK:
-		return p.parseArrayType(ellipsisOk)
+		return p.parseArrayType()
 	case token.STRUCT:
 		return p.parseStructType()
 	case token.MUL:
@@ -980,7 +1005,7 @@ func (p *parser) tryIdentOrType(ellipsisOk bool) ast.Expr {
 }
 
 func (p *parser) tryType() ast.Expr {
-	typ := p.tryIdentOrType(false)
+	typ := p.tryIdentOrType()
 	if typ != nil {
 		p.resolve(typ)
 	}
@@ -1088,7 +1113,7 @@ func (p *parser) parseOperand(lhs bool) ast.Expr {
 		return p.parseFuncTypeOrLit()
 	}
 
-	if typ := p.tryIdentOrType(true); typ != nil {
+	if typ := p.tryIdentOrType(); typ != nil {
 		// could be type for composite literal or conversion
 		_, isIdent := typ.(*ast.Ident)
 		assert(!isIdent, "type cannot be identifier")
@@ -1193,14 +1218,35 @@ func (p *parser) parseElement(keyOk bool) ast.Expr {
 		return p.parseLiteralValue(nil)
 	}
 
-	x := p.checkExpr(p.parseExpr(keyOk)) // don't resolve if map key
+	// Because the parser doesn't know the composite literal type, it cannot
+	// know if a key that's an identifier is a struct field name or a name
+	// denoting a value. The former is not resolved by the parser or the
+	// resolver.
+	//
+	// Instead, _try_ to resolve such a key if possible. If it resolves,
+	// it a) has correctly resolved, or b) incorrectly resolved because
+	// the key is a struct field with a name matching another identifier.
+	// In the former case we are done, and in the latter case we don't
+	// care because the type checker will do a separate field lookup.
+	//
+	// If the key does not resolve, it a) must be defined at the top
+	// level in another file of the same package, the universe scope, or be
+	// undeclared; or b) it is a struct field. In the former case, the type
+	// checker can do a top-level lookup, and in the latter case it will do
+	// a separate field lookup.
+	x := p.checkExpr(p.parseExpr(keyOk))
 	if keyOk {
 		if p.tok == token.COLON {
 			colon := p.pos
 			p.next()
+			// Try to resolve the key but don't collect it
+			// as unresolved identifier if it fails so that
+			// we don't get (possibly false) errors about
+			// undeclared names.
+			p.tryResolve(x, false)
 			return &ast.KeyValueExpr{Key: x, Colon: colon, Value: p.parseElement(false)}
 		}
-		p.resolve(x) // not a map key
+		p.resolve(x) // not a key
 	}
 
 	return x
@@ -1404,16 +1450,49 @@ func (p *parser) parseUnaryExpr(lhs bool) ast.Expr {
 
 	case token.ARROW:
 		// channel type or receive expression
-		pos := p.pos
+		arrow := p.pos
 		p.next()
-		if p.tok == token.CHAN {
-			p.next()
-			value := p.parseType()
-			return &ast.ChanType{Begin: pos, Dir: ast.RECV, Value: value}
-		}
+
+		// If the next token is token.CHAN we still don't know if it
+		// is a channel type or a receive operation - we only know
+		// once we have found the end of the unary expression. There
+		// are two cases:
+		//
+		//   <- type  => (<-type) must be channel type
+		//   <- expr  => <-(expr) is a receive from an expression
+		//
+		// In the first case, the arrow must be re-associated with
+		// the channel type parsed already:
+		//
+		//   <- (chan type)    =>  (<-chan type)
+		//   <- (chan<- type)  =>  (<-chan (<-type))
 
 		x := p.parseUnaryExpr(false)
-		return &ast.UnaryExpr{OpPos: pos, Op: token.ARROW, X: p.checkExpr(x)}
+
+		// determine which case we have
+		if typ, ok := x.(*ast.ChanType); ok {
+			// (<-type)
+
+			// re-associate position info and <-
+			dir := ast.SEND
+			for ok && dir == ast.SEND {
+				if typ.Dir == ast.RECV {
+					// error: (<-type) is (<-(<-chan T))
+					p.errorExpected(typ.Arrow, "'chan'")
+				}
+				arrow, typ.Begin, typ.Arrow = typ.Arrow, arrow, arrow
+				dir, typ.Dir = typ.Dir, ast.RECV
+				typ, ok = typ.Value.(*ast.ChanType)
+			}
+			if dir == ast.SEND {
+				p.errorExpected(arrow, "channel type")
+			}
+
+			return x
+		}
+
+		// <-(expr)
+		return &ast.UnaryExpr{OpPos: arrow, Op: token.ARROW, X: p.checkExpr(x)}
 
 	case token.MUL:
 		// pointer type or unary "*" expression
@@ -1774,7 +1853,7 @@ func (p *parser) parseSwitchStmt() ast.Stmt {
 		//
 		//	switch t := 0; t := x.(T) { ... }
 		//
-		// (this code is not valid Go because the first t will
+		// (this code is not valid Go because the first t
 		// cannot be accessed and thus is never used, the extra
 		// scope is needed for the correct error message).
 		//
@@ -2012,7 +2091,7 @@ func (p *parser) parseStmt() (s ast.Stmt) {
 // ----------------------------------------------------------------------------
 // Declarations
 
-type parseSpecFunction func(p *parser, doc *ast.CommentGroup, iota int) ast.Spec
+type parseSpecFunction func(p *parser, doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec
 
 func isValidImport(lit string) bool {
 	const illegalChars = `!"#$%&'()*,:;<=>?[\]^{|}` + "`\uFFFD"
@@ -2025,7 +2104,7 @@ func isValidImport(lit string) bool {
 	return s != ""
 }
 
-func parseImportSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
+func (p *parser) parseImportSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
 	if p.trace {
 		defer un(trace(p, "ImportSpec"))
 	}
@@ -2063,15 +2142,15 @@ func parseImportSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
 	return spec
 }
 
-func parseConstSpec(p *parser, doc *ast.CommentGroup, iota int) ast.Spec {
+func (p *parser) parseValueSpec(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec {
 	if p.trace {
-		defer un(trace(p, "ConstSpec"))
+		defer un(trace(p, keyword.String()+"Spec"))
 	}
 
 	idents := p.parseIdentList()
 	typ := p.tryType()
 	var values []ast.Expr
-	if typ != nil || p.tok == token.ASSIGN || iota == 0 {
+	if p.tok == token.ASSIGN || keyword == token.CONST && (typ != nil || iota == 0) || keyword == token.VAR && typ == nil {
 		p.expect(token.ASSIGN)
 		values = p.parseRhsList()
 	}
@@ -2088,12 +2167,16 @@ func parseConstSpec(p *parser, doc *ast.CommentGroup, iota int) ast.Spec {
 		Values:  values,
 		Comment: p.lineComment,
 	}
-	p.declare(spec, iota, p.topScope, ast.Con, idents...)
+	kind := ast.Con
+	if keyword == token.VAR {
+		kind = ast.Var
+	}
+	p.declare(spec, iota, p.topScope, kind, idents...)
 
 	return spec
 }
 
-func parseTypeSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
+func (p *parser) parseTypeSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
 	if p.trace {
 		defer un(trace(p, "TypeSpec"))
 	}
@@ -2114,36 +2197,6 @@ func parseTypeSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
 	return spec
 }
 
-func parseVarSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
-	if p.trace {
-		defer un(trace(p, "VarSpec"))
-	}
-
-	idents := p.parseIdentList()
-	typ := p.tryType()
-	var values []ast.Expr
-	if typ == nil || p.tok == token.ASSIGN {
-		p.expect(token.ASSIGN)
-		values = p.parseRhsList()
-	}
-	p.expectSemi() // call before accessing p.linecomment
-
-	// Go spec: The scope of a constant or variable identifier declared inside
-	// a function begins at the end of the ConstSpec or VarSpec and ends at
-	// the end of the innermost containing block.
-	// (Global identifiers are resolved in a separate phase after parsing.)
-	spec := &ast.ValueSpec{
-		Doc:     doc,
-		Names:   idents,
-		Type:    typ,
-		Values:  values,
-		Comment: p.lineComment,
-	}
-	p.declare(spec, nil, p.topScope, ast.Var, idents...)
-
-	return spec
-}
-
 func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
 	if p.trace {
 		defer un(trace(p, "GenDecl("+keyword.String()+")"))
@@ -2157,12 +2210,12 @@ func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.Gen
 		lparen = p.pos
 		p.next()
 		for iota := 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++ {
-			list = append(list, f(p, p.leadComment, iota))
+			list = append(list, f(p, p.leadComment, keyword, iota))
 		}
 		rparen = p.expect(token.RPAREN)
 		p.expectSemi()
 	} else {
-		list = append(list, f(p, nil, 0))
+		list = append(list, f(p, nil, keyword, 0))
 	}
 
 	return &ast.GenDecl{
@@ -2262,14 +2315,11 @@ func (p *parser) parseDecl(sync func(*parser)) ast.Decl {
 
 	var f parseSpecFunction
 	switch p.tok {
-	case token.CONST:
-		f = parseConstSpec
+	case token.CONST, token.VAR:
+		f = (*parser).parseValueSpec
 
 	case token.TYPE:
-		f = parseTypeSpec
-
-	case token.VAR:
-		f = parseVarSpec
+		f = (*parser).parseTypeSpec
 
 	case token.FUNC:
 		return p.parseFuncDecl()
@@ -2292,6 +2342,12 @@ func (p *parser) parseFile() *ast.File {
 		defer un(trace(p, "File"))
 	}
 
+	// Don't bother parsing the rest if we had errors scanning the first token.
+	// Likely not a Go source file at all.
+	if p.errors.Len() != 0 {
+		return nil
+	}
+
 	// package clause
 	doc := p.leadComment
 	pos := p.expect(token.PACKAGE)
@@ -2303,15 +2359,19 @@ func (p *parser) parseFile() *ast.File {
 	}
 	p.expectSemi()
 
-	var decls []ast.Decl
-
-	// Don't bother parsing the rest if we had errors already.
+	// Don't bother parsing the rest if we had errors parsing the package clause.
 	// Likely not a Go source file at all.
+	if p.errors.Len() != 0 {
+		return nil
+	}
 
-	if p.errors.Len() == 0 && p.mode&PackageClauseOnly == 0 {
+	p.openScope()
+	p.pkgScope = p.topScope
+	var decls []ast.Decl
+	if p.mode&PackageClauseOnly == 0 {
 		// import decls
 		for p.tok == token.IMPORT {
-			decls = append(decls, p.parseGenDecl(token.IMPORT, parseImportSpec))
+			decls = append(decls, p.parseGenDecl(token.IMPORT, (*parser).parseImportSpec))
 		}
 
 		if p.mode&ImportsOnly == 0 {
@@ -2321,8 +2381,9 @@ func (p *parser) parseFile() *ast.File {
 			}
 		}
 	}
-
-	assert(p.topScope == p.pkgScope, "imbalanced scopes")
+	p.closeScope()
+	assert(p.topScope == nil, "unbalanced scopes")
+	assert(p.labelScope == nil, "unbalanced label scopes")
 
 	// resolve global identifiers within the same file
 	i := 0
diff --git a/src/pkg/go/parser/parser_test.go b/src/pkg/go/parser/parser_test.go
index 1b7a41b1b..1960377b0 100644
--- a/src/pkg/go/parser/parser_test.go
+++ b/src/pkg/go/parser/parser_test.go
@@ -135,6 +135,53 @@ func TestVarScope(t *testing.T) {
 	}
 }
 
+func TestObjects(t *testing.T) {
+	const src = `
+package p
+import fmt "fmt"
+const pi = 3.14
+type T struct{}
+var x int
+func f() { L: }
+`
+
+	f, err := ParseFile(fset, "", src, 0)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	objects := map[string]ast.ObjKind{
+		"p":   ast.Bad, // not in a scope
+		"fmt": ast.Bad, // not resolved yet
+		"pi":  ast.Con,
+		"T":   ast.Typ,
+		"x":   ast.Var,
+		"int": ast.Bad, // not resolved yet
+		"f":   ast.Fun,
+		"L":   ast.Lbl,
+	}
+
+	ast.Inspect(f, func(n ast.Node) bool {
+		if ident, ok := n.(*ast.Ident); ok {
+			obj := ident.Obj
+			if obj == nil {
+				if objects[ident.Name] != ast.Bad {
+					t.Errorf("no object for %s", ident.Name)
+				}
+				return true
+			}
+			if obj.Name != ident.Name {
+				t.Errorf("names don't match: obj.Name = %s, ident.Name = %s", obj.Name, ident.Name)
+			}
+			kind := objects[ident.Name]
+			if obj.Kind != kind {
+				t.Errorf("%s: obj.Kind = %s; want %s", ident.Name, obj.Kind, kind)
+			}
+		}
+		return true
+	})
+}
+
 func TestUnresolved(t *testing.T) {
 	f, err := ParseFile(fset, "", `
 package p
diff --git a/src/pkg/go/parser/performance_test.go b/src/pkg/go/parser/performance_test.go
new file mode 100644
index 000000000..f2732c0e2
--- /dev/null
+++ b/src/pkg/go/parser/performance_test.go
@@ -0,0 +1,30 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package parser
+
+import (
+	"go/token"
+	"io/ioutil"
+	"testing"
+)
+
+var src = readFile("parser.go")
+
+func readFile(filename string) []byte {
+	data, err := ioutil.ReadFile(filename)
+	if err != nil {
+		panic(err)
+	}
+	return data
+}
+
+func BenchmarkParse(b *testing.B) {
+	b.SetBytes(int64(len(src)))
+	for i := 0; i < b.N; i++ {
+		if _, err := ParseFile(token.NewFileSet(), "", src, ParseComments); err != nil {
+			b.Fatalf("benchmark failed due to parse error: %s", err)
+		}
+	}
+}
diff --git a/src/pkg/go/parser/short_test.go b/src/pkg/go/parser/short_test.go
index 238492bf3..c62f7e050 100644
--- a/src/pkg/go/parser/short_test.go
+++ b/src/pkg/go/parser/short_test.go
@@ -13,8 +13,10 @@ var valids = []string{
 	`package p;`,
 	`package p; import "fmt"; func f() { fmt.Println("Hello, World!") };`,
 	`package p; func f() { if f(T{}) {} };`,
-	`package p; func f() { _ = (<-chan int)(x) };`,
-	`package p; func f() { _ = (<-chan <-chan int)(x) };`,
+	`package p; func f() { _ = <-chan int(nil) };`,
+	`package p; func f() { _ = (<-chan int)(nil) };`,
+	`package p; func f() { _ = (<-chan <-chan int)(nil) };`,
+	`package p; func f() { _ = <-chan <-chan <-chan <-chan <-int(nil) };`,
 	`package p; func f(func() func() func());`,
 	`package p; func f(...T);`,
 	`package p; func f(float, ...int);`,
@@ -64,8 +66,11 @@ var invalids = []string{
 	`package p; var a = []int{[ /* ERROR "expected expression" */ ]int};`,
 	`package p; var a = ( /* ERROR "expected expression" */ []int);`,
 	`package p; var a = a[[ /* ERROR "expected expression" */ ]int:[]int];`,
-	`package p; var a = <- /* ERROR "expected expression" */ chan int;`,
-	`package p; func f() { select { case _ <- chan /* ERROR "expected expression" */ int: } };`,
+	`package p; var a = <- /* ERROR "expected expression" */ chan int;`,
+	`package p; func f() { select { case _ <- chan /* ERROR "expected expression" */ int: } };`,
+	`package p; func f() { _ = (<-<- /* ERROR "expected 'chan'" */ chan int)(nil) };`,
+	`package p; func f() { _ = (<-chan<-chan<-chan<-chan<-chan<- /* ERROR "expected channel type" */ int)(nil) };`,
+	`package p; func f() { var t []int; t /* ERROR "expected identifier on left side of :=" */ [0] := 0 };`,
 }
 
 func TestInvalid(t *testing.T) {
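For the parseUnaryExpr change above, a short standalone sketch (not part of this CL) of the re-association it performs: a leading <- applied to a channel type is folded into that type, so `<-chan <-chan int` ends up as a receive-only channel of receive-only channels. The sample expression mirrors the new short_test.go cases:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
)

func main() {
	// Parse a conversion whose type is a nested receive-only channel type.
	x, err := parser.ParseExpr("(<-chan <-chan int)(nil)")
	if err != nil {
		panic(err)
	}

	// The parser re-associates the arrows: both channel types are ast.RECV.
	conv := x.(*ast.CallExpr)
	outer := conv.Fun.(*ast.ParenExpr).X.(*ast.ChanType)
	inner := outer.Value.(*ast.ChanType)
	fmt.Println(outer.Dir == ast.RECV, inner.Dir == ast.RECV) // true true
}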