Diffstat (limited to 'src/pkg/go/parser/parser.go')
-rw-r--r--   src/pkg/go/parser/parser.go | 249
1 file changed, 155 insertions, 94 deletions
diff --git a/src/pkg/go/parser/parser.go b/src/pkg/go/parser/parser.go
index 20e505d97..a021a5abe 100644
--- a/src/pkg/go/parser/parser.go
+++ b/src/pkg/go/parser/parser.go
@@ -28,7 +28,7 @@ type parser struct {
 	// Tracing/debugging
 	mode   Mode // parsing mode
 	trace  bool // == (mode & Trace != 0)
-	indent uint // indentation used for tracing output
+	indent int  // indentation used for tracing output
 
 	// Comments
 	comments    []*ast.CommentGroup
@@ -56,7 +56,7 @@ type parser struct {
 	unresolved []*ast.Ident      // unresolved identifiers
 	imports    []*ast.ImportSpec // list of imports
 
-	// Label scope
+	// Label scopes
 	// (maintained by open/close LabelScope)
 	labelScope  *ast.Scope     // label scope for current function
 	targetStack [][]*ast.Ident // stack of unresolved labels
@@ -75,14 +75,6 @@ func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode Mod
 	p.trace = mode&Trace != 0 // for convenience (p.trace is used frequently)
 
 	p.next()
-
-	// set up the pkgScope here (as opposed to in parseFile) because
-	// there are other parser entry points (ParseExpr, etc.)
-	p.openScope()
-	p.pkgScope = p.topScope
-
-	// for the same reason, set up a label scope
-	p.openLabelScope()
 }
 
 // ----------------------------------------------------------------------------
@@ -157,7 +149,7 @@ func (p *parser) shortVarDecl(decl *ast.AssignStmt, list []ast.Expr) {
 				}
 			}
 		} else {
-			p.errorExpected(x.Pos(), "identifier")
+			p.errorExpected(x.Pos(), "identifier on left side of :=")
 		}
 	}
 	if n == 0 && p.mode&DeclarationErrors != 0 {
@@ -170,7 +162,12 @@ func (p *parser) shortVarDecl(decl *ast.AssignStmt, list []ast.Expr) {
 // internal consistency.
 var unresolved = new(ast.Object)
 
-func (p *parser) resolve(x ast.Expr) {
+// If x is an identifier, tryResolve attempts to resolve x by looking up
+// the object it denotes. If no object is found and collectUnresolved is
+// set, x is marked as unresolved and collected in the list of unresolved
+// identifiers.
+//
+func (p *parser) tryResolve(x ast.Expr, collectUnresolved bool) {
 	// nothing to do if x is not an identifier or the blank identifier
 	ident, _ := x.(*ast.Ident)
 	if ident == nil {
@@ -191,23 +188,30 @@ func (p *parser) resolve(x ast.Expr) {
 	// must be found either in the file scope, package scope
 	// (perhaps in another file), or universe scope --- collect
 	// them so that they can be resolved later
-	ident.Obj = unresolved
-	p.unresolved = append(p.unresolved, ident)
+	if collectUnresolved {
+		ident.Obj = unresolved
+		p.unresolved = append(p.unresolved, ident)
+	}
+}
+
+func (p *parser) resolve(x ast.Expr) {
+	p.tryResolve(x, true)
 }
 
 // ----------------------------------------------------------------------------
 // Parsing support
 
 func (p *parser) printTrace(a ...interface{}) {
-	const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . " +
-		". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
-	const n = uint(len(dots))
+	const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
+	const n = len(dots)
 	pos := p.file.Position(p.pos)
 	fmt.Printf("%5d:%3d: ", pos.Line, pos.Column)
 	i := 2 * p.indent
-	for ; i > n; i -= n {
+	for i > n {
 		fmt.Print(dots)
+		i -= n
 	}
+	// i <= n
 	fmt.Print(dots[0:i])
 	fmt.Println(a...)
 }
@@ -218,7 +222,7 @@ func trace(p *parser, msg string) *parser {
 	return p
 }
 
-// Usage pattern: defer un(trace(p, "..."));
+// Usage pattern: defer un(trace(p, "..."))
func un(p *parser) {
 	p.indent--
 	p.printTrace(")")
@@ -304,14 +308,14 @@ func (p *parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline
 func (p *parser) next() {
 	p.leadComment = nil
 	p.lineComment = nil
-	line := p.file.Line(p.pos) // current line
+	prev := p.pos
 	p.next0()
 
 	if p.tok == token.COMMENT {
 		var comment *ast.CommentGroup
 		var endline int
 
-		if p.file.Line(p.pos) == line {
+		if p.file.Line(p.pos) == p.file.Line(prev) {
 			// The comment is on same line as the previous token; it
 			// cannot be a lead comment but may be a line comment.
 			comment, endline = p.consumeCommentGroup(0)
@@ -336,8 +340,26 @@ func (p *parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline
 	}
 }
 
+// A bailout panic is raised to indicate early termination.
+type bailout struct{}
+
 func (p *parser) error(pos token.Pos, msg string) {
-	p.errors.Add(p.file.Position(pos), msg)
+	epos := p.file.Position(pos)
+
+	// If AllErrors is not set, discard errors reported on the same line
+	// as the last recorded error and stop parsing if there are more than
+	// 10 errors.
+	if p.mode&AllErrors == 0 {
+		n := len(p.errors)
+		if n > 0 && p.errors[n-1].Pos.Line == epos.Line {
+			return // discard - likely a spurious error
+		}
+		if n > 10 {
+			panic(bailout{})
+		}
+	}
+
+	p.errors.Add(epos, msg)
 }
 
 func (p *parser) errorExpected(pos token.Pos, msg string) {
@@ -585,14 +607,15 @@ func (p *parser) parseTypeName() ast.Expr {
 	return ident
 }
 
-func (p *parser) parseArrayType(ellipsisOk bool) ast.Expr {
+func (p *parser) parseArrayType() ast.Expr {
 	if p.trace {
 		defer un(trace(p, "ArrayType"))
 	}
 
 	lbrack := p.expect(token.LBRACK)
 	var len ast.Expr
-	if ellipsisOk && p.tok == token.ELLIPSIS {
+	// always permit ellipsis for more fault-tolerant parsing
+	if p.tok == token.ELLIPSIS {
 		len = &ast.Ellipsis{Ellipsis: p.pos}
 		p.next()
 	} else if p.tok != token.RBRACK {
@@ -704,7 +727,7 @@ func (p *parser) tryVarType(isParam bool) ast.Expr {
 	if isParam && p.tok == token.ELLIPSIS {
 		pos := p.pos
 		p.next()
-		typ := p.tryIdentOrType(isParam) // don't use parseType so we can provide better error message
+		typ := p.tryIdentOrType() // don't use parseType so we can provide better error message
 		if typ != nil {
 			p.resolve(typ)
 		} else {
@@ -713,7 +736,7 @@ func (p *parser) tryVarType(isParam bool) ast.Expr {
 		}
 		return &ast.Ellipsis{Ellipsis: pos, Elt: typ}
 	}
-	return p.tryIdentOrType(false)
+	return p.tryIdentOrType()
 }
 
 // If the result is an identifier, it is not resolved.
@@ -931,29 +954,31 @@ func (p *parser) parseChanType() *ast.ChanType {
 
 	pos := p.pos
 	dir := ast.SEND | ast.RECV
+	var arrow token.Pos
 	if p.tok == token.CHAN {
 		p.next()
 		if p.tok == token.ARROW {
+			arrow = p.pos
 			p.next()
 			dir = ast.SEND
 		}
 	} else {
-		p.expect(token.ARROW)
+		arrow = p.expect(token.ARROW)
 		p.expect(token.CHAN)
 		dir = ast.RECV
 	}
 	value := p.parseType()
 
-	return &ast.ChanType{Begin: pos, Dir: dir, Value: value}
+	return &ast.ChanType{Begin: pos, Arrow: arrow, Dir: dir, Value: value}
 }
 
 // If the result is an identifier, it is not resolved.
-func (p *parser) tryIdentOrType(ellipsisOk bool) ast.Expr {
+func (p *parser) tryIdentOrType() ast.Expr {
 	switch p.tok {
 	case token.IDENT:
 		return p.parseTypeName()
 	case token.LBRACK:
-		return p.parseArrayType(ellipsisOk)
+		return p.parseArrayType()
 	case token.STRUCT:
 		return p.parseStructType()
 	case token.MUL:
@@ -980,7 +1005,7 @@ func (p *parser) tryIdentOrType(ellipsisOk bool) ast.Expr {
 }
 
 func (p *parser) tryType() ast.Expr {
-	typ := p.tryIdentOrType(false)
+	typ := p.tryIdentOrType()
 	if typ != nil {
 		p.resolve(typ)
 	}
@@ -1088,7 +1113,7 @@ func (p *parser) parseOperand(lhs bool) ast.Expr {
 		return p.parseFuncTypeOrLit()
 	}
 
-	if typ := p.tryIdentOrType(true); typ != nil {
+	if typ := p.tryIdentOrType(); typ != nil {
 		// could be type for composite literal or conversion
 		_, isIdent := typ.(*ast.Ident)
 		assert(!isIdent, "type cannot be identifier")
@@ -1193,14 +1218,35 @@ func (p *parser) parseElement(keyOk bool) ast.Expr {
 		return p.parseLiteralValue(nil)
 	}
 
-	x := p.checkExpr(p.parseExpr(keyOk)) // don't resolve if map key
+	// Because the parser doesn't know the composite literal type, it cannot
+	// know if a key that's an identifier is a struct field name or a name
+	// denoting a value. The former is not resolved by the parser or the
+	// resolver.
+	//
+	// Instead, _try_ to resolve such a key if possible. If it resolves,
+	// it a) has correctly resolved, or b) incorrectly resolved because
+	// the key is a struct field with a name matching another identifier.
+	// In the former case we are done, and in the latter case we don't
+	// care because the type checker will do a separate field lookup.
+	//
+	// If the key does not resolve, it a) must be defined at the top
+	// level in another file of the same package, the universe scope, or be
+	// undeclared; or b) it is a struct field. In the former case, the type
+	// checker can do a top-level lookup, and in the latter case it will do
+	// a separate field lookup.
+	x := p.checkExpr(p.parseExpr(keyOk))
 	if keyOk {
 		if p.tok == token.COLON {
 			colon := p.pos
 			p.next()
+			// Try to resolve the key but don't collect it
+			// as unresolved identifier if it fails so that
+			// we don't get (possibly false) errors about
+			// undeclared names.
+			p.tryResolve(x, false)
 			return &ast.KeyValueExpr{Key: x, Colon: colon, Value: p.parseElement(false)}
 		}
-		p.resolve(x) // not a map key
+		p.resolve(x) // not a key
 	}
 
 	return x
@@ -1404,16 +1450,49 @@ func (p *parser) parseUnaryExpr(lhs bool) ast.Expr {
 
 	case token.ARROW:
 		// channel type or receive expression
-		pos := p.pos
+		arrow := p.pos
 		p.next()
-		if p.tok == token.CHAN {
-			p.next()
-			value := p.parseType()
-			return &ast.ChanType{Begin: pos, Dir: ast.RECV, Value: value}
-		}
+
+		// If the next token is token.CHAN we still don't know if it
+		// is a channel type or a receive operation - we only know
+		// once we have found the end of the unary expression. There
+		// are two cases:
+		//
+		//   <- type  => (<-type) must be channel type
+		//   <- expr  => <-(expr) is a receive from an expression
+		//
+		// In the first case, the arrow must be re-associated with
+		// the channel type parsed already:
+		//
+		//   <- (chan type)    =>  (<-chan type)
+		//   <- (chan<- type)  =>  (<-chan (<-type))
 
 		x := p.parseUnaryExpr(false)
-		return &ast.UnaryExpr{OpPos: pos, Op: token.ARROW, X: p.checkExpr(x)}
+
+		// determine which case we have
+		if typ, ok := x.(*ast.ChanType); ok {
+			// (<-type)
+
+			// re-associate position info and <-
+			dir := ast.SEND
+			for ok && dir == ast.SEND {
+				if typ.Dir == ast.RECV {
+					// error: (<-type) is (<-(<-chan T))
+					p.errorExpected(typ.Arrow, "'chan'")
+				}
+				arrow, typ.Begin, typ.Arrow = typ.Arrow, arrow, arrow
+				dir, typ.Dir = typ.Dir, ast.RECV
+				typ, ok = typ.Value.(*ast.ChanType)
+			}
+			if dir == ast.SEND {
+				p.errorExpected(arrow, "channel type")
+			}
+
+			return x
+		}
+
+		// <-(expr)
+		return &ast.UnaryExpr{OpPos: arrow, Op: token.ARROW, X: p.checkExpr(x)}
 
 	case token.MUL:
 		// pointer type or unary "*" expression
@@ -1774,7 +1853,7 @@ func (p *parser) parseSwitchStmt() ast.Stmt {
 				//
 				//	switch t := 0; t := x.(T) { ... }
 				//
-				// (this code is not valid Go because the first t will
+				// (this code is not valid Go because the first t
 				// cannot be accessed and thus is never used, the extra
 				// scope is needed for the correct error message).
 				//
@@ -2012,7 +2091,7 @@ func (p *parser) parseStmt() (s ast.Stmt) {
 // ----------------------------------------------------------------------------
 // Declarations
 
-type parseSpecFunction func(p *parser, doc *ast.CommentGroup, iota int) ast.Spec
+type parseSpecFunction func(p *parser, doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec
 
 func isValidImport(lit string) bool {
 	const illegalChars = `!"#$%&'()*,:;<=>?[\]^{|}` + "`\uFFFD"
@@ -2025,7 +2104,7 @@ func isValidImport(lit string) bool {
 	return s != ""
 }
 
-func parseImportSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
+func (p *parser) parseImportSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
 	if p.trace {
 		defer un(trace(p, "ImportSpec"))
 	}
@@ -2063,15 +2142,15 @@ func parseImportSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
 	return spec
 }
 
-func parseConstSpec(p *parser, doc *ast.CommentGroup, iota int) ast.Spec {
+func (p *parser) parseValueSpec(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec {
 	if p.trace {
-		defer un(trace(p, "ConstSpec"))
+		defer un(trace(p, keyword.String()+"Spec"))
 	}
 
 	idents := p.parseIdentList()
 	typ := p.tryType()
 	var values []ast.Expr
-	if typ != nil || p.tok == token.ASSIGN || iota == 0 {
+	if p.tok == token.ASSIGN || keyword == token.CONST && (typ != nil || iota == 0) || keyword == token.VAR && typ == nil {
 		p.expect(token.ASSIGN)
 		values = p.parseRhsList()
 	}
@@ -2088,12 +2167,16 @@ func parseConstSpec(p *parser, doc *ast.CommentGroup, iota int) ast.Spec {
 		Values:  values,
 		Comment: p.lineComment,
 	}
-	p.declare(spec, iota, p.topScope, ast.Con, idents...)
+	kind := ast.Con
+	if keyword == token.VAR {
+		kind = ast.Var
+	}
+	p.declare(spec, iota, p.topScope, kind, idents...)
 
 	return spec
 }
 
-func parseTypeSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
+func (p *parser) parseTypeSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
 	if p.trace {
 		defer un(trace(p, "TypeSpec"))
 	}
@@ -2114,36 +2197,6 @@ func parseTypeSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
 	return spec
 }
 
-func parseVarSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
-	if p.trace {
-		defer un(trace(p, "VarSpec"))
-	}
-
-	idents := p.parseIdentList()
-	typ := p.tryType()
-	var values []ast.Expr
-	if typ == nil || p.tok == token.ASSIGN {
-		p.expect(token.ASSIGN)
-		values = p.parseRhsList()
-	}
-	p.expectSemi() // call before accessing p.linecomment
-
-	// Go spec: The scope of a constant or variable identifier declared inside
-	// a function begins at the end of the ConstSpec or VarSpec and ends at
-	// the end of the innermost containing block.
-	// (Global identifiers are resolved in a separate phase after parsing.)
-	spec := &ast.ValueSpec{
-		Doc:     doc,
-		Names:   idents,
-		Type:    typ,
-		Values:  values,
-		Comment: p.lineComment,
-	}
-	p.declare(spec, nil, p.topScope, ast.Var, idents...)
-
-	return spec
-}
-
 func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
 	if p.trace {
 		defer un(trace(p, "GenDecl("+keyword.String()+")"))
@@ -2157,12 +2210,12 @@ func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.Gen
 		lparen = p.pos
 		p.next()
 		for iota := 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++ {
-			list = append(list, f(p, p.leadComment, iota))
+			list = append(list, f(p, p.leadComment, keyword, iota))
 		}
 		rparen = p.expect(token.RPAREN)
 		p.expectSemi()
 	} else {
-		list = append(list, f(p, nil, 0))
+		list = append(list, f(p, nil, keyword, 0))
 	}
 
 	return &ast.GenDecl{
@@ -2262,14 +2315,11 @@ func (p *parser) parseDecl(sync func(*parser)) ast.Decl {
 
 	var f parseSpecFunction
 	switch p.tok {
-	case token.CONST:
-		f = parseConstSpec
+	case token.CONST, token.VAR:
+		f = (*parser).parseValueSpec
 
 	case token.TYPE:
-		f = parseTypeSpec
-
-	case token.VAR:
-		f = parseVarSpec
+		f = (*parser).parseTypeSpec
 
 	case token.FUNC:
 		return p.parseFuncDecl()
@@ -2292,6 +2342,12 @@ func (p *parser) parseFile() *ast.File {
 		defer un(trace(p, "File"))
 	}
 
+	// Don't bother parsing the rest if we had errors scanning the first token.
+	// Likely not a Go source file at all.
+	if p.errors.Len() != 0 {
+		return nil
+	}
+
 	// package clause
 	doc := p.leadComment
 	pos := p.expect(token.PACKAGE)
@@ -2303,15 +2359,19 @@ func (p *parser) parseFile() *ast.File {
 	}
 	p.expectSemi()
 
-	var decls []ast.Decl
-
-	// Don't bother parsing the rest if we had errors already.
+	// Don't bother parsing the rest if we had errors parsing the package clause.
 	// Likely not a Go source file at all.
+	if p.errors.Len() != 0 {
+		return nil
+	}
 
-	if p.errors.Len() == 0 && p.mode&PackageClauseOnly == 0 {
+	p.openScope()
+	p.pkgScope = p.topScope
+	var decls []ast.Decl
+	if p.mode&PackageClauseOnly == 0 {
 		// import decls
 		for p.tok == token.IMPORT {
-			decls = append(decls, p.parseGenDecl(token.IMPORT, parseImportSpec))
+			decls = append(decls, p.parseGenDecl(token.IMPORT, (*parser).parseImportSpec))
 		}
 
 		if p.mode&ImportsOnly == 0 {
@@ -2321,8 +2381,9 @@ func (p *parser) parseFile() *ast.File {
 			}
 		}
 	}
-
-	assert(p.topScope == p.pkgScope, "imbalanced scopes")
+	p.closeScope()
+	assert(p.topScope == nil, "unbalanced scopes")
+	assert(p.labelScope == nil, "unbalanced label scopes")
 
 	// resolve global identifiers within the same file
 	i := 0
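Note, not part of the patch: the bailout introduced in p.error is visible through the public API as a capped error list. A minimal sketch, assuming a current go/parser and a deliberately broken source string (both chosen for illustration); without parser.AllErrors the returned scanner.ErrorList drops same-line follow-up errors and parsing stops after more than ten recorded errors, with it every error is kept.

	package main

	import (
		"fmt"
		"go/parser"
		"go/scanner"
		"go/token"
	)

	func main() {
		// Twelve lines that each trigger an independent parse error.
		src := "package p\nfunc f() {\n"
		for i := 0; i < 12; i++ {
			src += "\tvar 1\n"
		}
		src += "}\n"

		fset := token.NewFileSet()

		_, err := parser.ParseFile(fset, "bad.go", src, 0)
		if list, ok := err.(scanner.ErrorList); ok {
			fmt.Println("default:  ", len(list), "errors") // capped once more than 10 are recorded
		}

		_, err = parser.ParseFile(fset, "bad.go", src, parser.AllErrors)
		if list, ok := err.(scanner.ErrorList); ok {
			fmt.Println("AllErrors:", len(list), "errors") // every recorded error
		}
	}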
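Note, not part of the patch: the arrow re-association in parseUnaryExpr can be checked against the exported go/ast nodes. A minimal sketch, assuming a current toolchain, confirming that "<-chan <-chan int" is parsed as "<-chan (<-chan int)", i.e. both ChanType nodes end up receive-only.

	package main

	import (
		"fmt"
		"go/ast"
		"go/parser"
		"go/token"
	)

	func main() {
		// The leading arrow must be re-associated with the channel
		// type that follows it.
		src := "package p\nvar c <-chan <-chan int\n"
		fset := token.NewFileSet()
		f, err := parser.ParseFile(fset, "p.go", src, 0)
		if err != nil {
			panic(err)
		}
		spec := f.Decls[0].(*ast.GenDecl).Specs[0].(*ast.ValueSpec)
		outer := spec.Type.(*ast.ChanType)
		inner := outer.Value.(*ast.ChanType)
		fmt.Println(outer.Dir == ast.RECV, inner.Dir == ast.RECV) // true true
	}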
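Note, not part of the patch: because composite literal keys are now resolved with tryResolve(x, false), a struct field name used as a key is never collected as unresolved, while ordinary value identifiers still are. A minimal sketch using ast.File.Unresolved (still populated by default, although deprecated in recent releases); the type T and the names F and G are made up for illustration.

	package main

	import (
		"fmt"
		"go/parser"
		"go/token"
	)

	func main() {
		// F is a struct field key: it is looked up but never reported
		// as unresolved. G is a value and stays in File.Unresolved.
		src := "package p\ntype T struct{ F int }\nvar _ = T{F: G}\n"

		fset := token.NewFileSet()
		f, err := parser.ParseFile(fset, "p.go", src, 0)
		if err != nil {
			panic(err)
		}
		for _, id := range f.Unresolved {
			fmt.Println("unresolved:", id.Name) // prints "int" and "G"; the field key "F" is absent
		}
	}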
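Note, not part of the patch: parseValueSpec merges the old parseConstSpec and parseVarSpec, and its initializer condition differs per keyword: a const spec after the first may omit both type and values (implicitly repeating the previous expression list), while a var spec needs a type or an initializer. A minimal sketch contrasting the two, with illustrative file names and sources.

	package main

	import (
		"fmt"
		"go/parser"
		"go/token"
	)

	func main() {
		// "b" legally repeats the previous "= iota" expression list.
		constSrc := "package p\nconst (\n\ta = iota\n\tb\n)\n"
		// "y" has neither a type nor an initializer, which is an error.
		varSrc := "package p\nvar (\n\tx int\n\ty\n)\n"

		fset := token.NewFileSet()

		_, err := parser.ParseFile(fset, "const.go", constSrc, 0)
		fmt.Println("const:", err) // <nil>

		_, err = parser.ParseFile(fset, "var.go", varSrc, 0)
		fmt.Println("var:  ", err) // a parse error (exact message depends on the parser version)
	}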
