Diffstat (limited to 'src/pkg/go/ast')
-rw-r--r--  src/pkg/go/ast/ast.go                 3
-rw-r--r--  src/pkg/go/ast/commentmap.go        332
-rw-r--r--  src/pkg/go/ast/commentmap_test.go   143
-rw-r--r--  src/pkg/go/ast/filter.go              2
-rw-r--r--  src/pkg/go/ast/import.go              2
-rw-r--r--  src/pkg/go/ast/print.go              79
-rw-r--r--  src/pkg/go/ast/print_test.go         21
-rw-r--r--  src/pkg/go/ast/resolve.go             4
-rw-r--r--  src/pkg/go/ast/scope.go              14
-rw-r--r--  src/pkg/go/ast/walk.go                7
10 files changed, 566 insertions, 41 deletions
diff --git a/src/pkg/go/ast/ast.go b/src/pkg/go/ast/ast.go
index d2e75dc1c..bf533d1d2 100644
--- a/src/pkg/go/ast/ast.go
+++ b/src/pkg/go/ast/ast.go
@@ -407,6 +407,7 @@ type (
// A ChanType node represents a channel type.
ChanType struct {
Begin token.Pos // position of "chan" keyword or "<-" (whichever comes first)
+ Arrow token.Pos // position of "<-" (token.NoPos if there is no "<-")
Dir ChanDir // channel direction
Value Expr // value type
}
@@ -554,7 +555,7 @@ type (
// A DeclStmt node represents a declaration in a statement list.
DeclStmt struct {
- Decl Decl
+ Decl Decl // *GenDecl with CONST, TYPE, or VAR token
}
// An EmptyStmt node represents an empty statement.
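
The new Arrow field records where the "<-" token is in a directed channel type; Begin alone does not provide that for send-only channels (chan<- T), where "chan" comes first. A minimal sketch, not part of this patch, of how a tool might consult the field; it assumes the parser populates it, as released versions of go/parser do:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

func main() {
	const src = `package p; var (a chan int; b <-chan int; c chan<- int)`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	ast.Inspect(f, func(n ast.Node) bool {
		if ch, ok := n.(*ast.ChanType); ok {
			// Arrow.IsValid() reports whether a "<-" is present,
			// i.e. whether the channel type is directional.
			fmt.Printf("%-10s dir=%d arrow=%v\n",
				fset.Position(ch.Begin), ch.Dir, ch.Arrow.IsValid())
		}
		return true
	})
}
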
diff --git a/src/pkg/go/ast/commentmap.go b/src/pkg/go/ast/commentmap.go
new file mode 100644
index 000000000..252d460af
--- /dev/null
+++ b/src/pkg/go/ast/commentmap.go
@@ -0,0 +1,332 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ast
+
+import (
+ "bytes"
+ "fmt"
+ "go/token"
+ "sort"
+)
+
+type byPos []*CommentGroup
+
+func (a byPos) Len() int { return len(a) }
+func (a byPos) Less(i, j int) bool { return a[i].Pos() < a[j].Pos() }
+func (a byPos) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
+
+// sortComments sorts the list of comment groups in source order.
+//
+func sortComments(list []*CommentGroup) {
+ // TODO(gri): Does it make sense to check for sorted-ness
+ // first (because we know that sorted-ness is
+ // very likely)?
+ if orderedList := byPos(list); !sort.IsSorted(orderedList) {
+ sort.Sort(orderedList)
+ }
+}
+
+// A CommentMap maps an AST node to a list of comment groups
+// associated with it. See NewCommentMap for a description of
+// the association.
+//
+type CommentMap map[Node][]*CommentGroup
+
+func (cmap CommentMap) addComment(n Node, c *CommentGroup) {
+ list := cmap[n]
+ if len(list) == 0 {
+ list = []*CommentGroup{c}
+ } else {
+ list = append(list, c)
+ }
+ cmap[n] = list
+}
+
+type byInterval []Node
+
+func (a byInterval) Len() int { return len(a) }
+func (a byInterval) Less(i, j int) bool {
+ pi, pj := a[i].Pos(), a[j].Pos()
+ return pi < pj || pi == pj && a[i].End() > a[j].End()
+}
+func (a byInterval) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
+
+// nodeList returns the list of nodes of the AST n in source order.
+//
+func nodeList(n Node) []Node {
+ var list []Node
+ Inspect(n, func(n Node) bool {
+ // don't collect comments
+ switch n.(type) {
+ case nil, *CommentGroup, *Comment:
+ return false
+ }
+ list = append(list, n)
+ return true
+ })
+ // Note: The current implementation assumes that Inspect traverses the
+ // AST in depth-first and thus _source_ order. If AST traversal
+ // does not follow source order, the sorting call below will be
+ // required.
+ // sort.Sort(byInterval(list))
+ return list
+}
+
+// A commentListReader helps iterate through a list of comment groups.
+//
+type commentListReader struct {
+ fset *token.FileSet
+ list []*CommentGroup
+ index int
+ comment *CommentGroup // comment group at current index
+ pos, end token.Position // source interval of comment group at current index
+}
+
+func (r *commentListReader) eol() bool {
+ return r.index >= len(r.list)
+}
+
+func (r *commentListReader) next() {
+ if !r.eol() {
+ r.comment = r.list[r.index]
+ r.pos = r.fset.Position(r.comment.Pos())
+ r.end = r.fset.Position(r.comment.End())
+ r.index++
+ }
+}
+
+// A nodeStack keeps track of nested nodes.
+// A node lower on the stack lexically contains the nodes higher on the stack.
+//
+type nodeStack []Node
+
+// push pops all nodes that appear lexically before n
+// and then pushes n on the stack.
+//
+func (s *nodeStack) push(n Node) {
+ s.pop(n.Pos())
+ *s = append((*s), n)
+}
+
+// pop pops all nodes that appear lexically before pos
+// (i.e., whose lexical extent has ended before or at pos).
+// It returns the last node popped.
+//
+func (s *nodeStack) pop(pos token.Pos) (top Node) {
+ i := len(*s)
+ for i > 0 && (*s)[i-1].End() <= pos {
+ top = (*s)[i-1]
+ i--
+ }
+ *s = (*s)[0:i]
+ return top
+}
+
+// NewCommentMap creates a new comment map by associating comment groups
+// of the comments list with the nodes of the AST specified by node.
+//
+// A comment group g is associated with a node n if:
+//
+// - g starts on the same line as n ends
+// - g starts on the line immediately following n, and there is
+// at least one empty line after g and before the next node
+// - g starts before n and is not associated with the node before n
+// via the previous rules
+//
+// NewCommentMap tries to associate a comment group to the "largest"
+// node possible: For instance, if the comment is a line comment
+// trailing an assignment, the comment is associated with the entire
+// assignment rather than just the last operand in the assignment.
+//
+func NewCommentMap(fset *token.FileSet, node Node, comments []*CommentGroup) CommentMap {
+ if len(comments) == 0 {
+ return nil // no comments to map
+ }
+
+ cmap := make(CommentMap)
+
+ // set up comment reader r
+ tmp := make([]*CommentGroup, len(comments))
+ copy(tmp, comments) // don't change incoming comments
+ sortComments(tmp)
+ r := commentListReader{fset: fset, list: tmp} // !r.eol() because len(comments) > 0
+ r.next()
+
+ // create node list in lexical order
+ nodes := nodeList(node)
+ nodes = append(nodes, nil) // append sentinel
+
+ // set up iteration variables
+ var (
+ p Node // previous node
+ pend token.Position // end of p
+ pg Node // previous node group (enclosing nodes of "importance")
+ pgend token.Position // end of pg
+ stack nodeStack // stack of node groups
+ )
+
+ for _, q := range nodes {
+ var qpos token.Position
+ if q != nil {
+ qpos = fset.Position(q.Pos()) // current node position
+ } else {
+ // set fake sentinel position to infinity so that
+ // all comments get processed before the sentinel
+ const infinity = 1 << 30
+ qpos.Offset = infinity
+ qpos.Line = infinity
+ }
+
+ // process comments before current node
+ for r.end.Offset <= qpos.Offset {
+ // determine recent node group
+ if top := stack.pop(r.comment.Pos()); top != nil {
+ pg = top
+ pgend = fset.Position(pg.End())
+ }
+ // Try to associate a comment first with a node group
+ // (i.e., a node of "importance" such as a declaration);
+ // if that fails, try to associate it with the most recent
+ // node.
+ // TODO(gri) try to simplify the logic below
+ var assoc Node
+ switch {
+ case pg != nil &&
+ (pgend.Line == r.pos.Line ||
+ pgend.Line+1 == r.pos.Line && r.end.Line+1 < qpos.Line):
+ // 1) comment starts on same line as previous node group ends, or
+ // 2) comment starts on the line immediately after the
+ // previous node group and there is an empty line before
+ // the current node
+ // => associate comment with previous node group
+ assoc = pg
+ case p != nil &&
+ (pend.Line == r.pos.Line ||
+ pend.Line+1 == r.pos.Line && r.end.Line+1 < qpos.Line ||
+ q == nil):
+ // same rules apply as above for p rather than pg,
+ // but also associate with p if we are at the end (q == nil)
+ assoc = p
+ default:
+ // otherwise, associate comment with current node
+ if q == nil {
+ // we can only reach here if there was no p
+ // which would imply that there were no nodes
+ panic("internal error: no comments should be associated with sentinel")
+ }
+ assoc = q
+ }
+ cmap.addComment(assoc, r.comment)
+ if r.eol() {
+ return cmap
+ }
+ r.next()
+ }
+
+ // update previous node
+ p = q
+ pend = fset.Position(p.End())
+
+ // update previous node group if we see an "important" node
+ switch q.(type) {
+ case *File, *Field, Decl, Spec, Stmt:
+ stack.push(q)
+ }
+ }
+
+ return cmap
+}
+
+// Update replaces an old node in the comment map with the new node
+// and returns the new node. Comments that were associated with the
+// old node are associated with the new node.
+//
+func (cmap CommentMap) Update(old, new Node) Node {
+ if list := cmap[old]; len(list) > 0 {
+ delete(cmap, old)
+ cmap[new] = append(cmap[new], list...)
+ }
+ return new
+}
+
+// Filter returns a new comment map consisting of only those
+// entries of cmap for which a corresponding node exists in
+// the AST specified by node.
+//
+func (cmap CommentMap) Filter(node Node) CommentMap {
+ umap := make(CommentMap)
+ Inspect(node, func(n Node) bool {
+ if g := cmap[n]; len(g) > 0 {
+ umap[n] = g
+ }
+ return true
+ })
+ return umap
+}
+
+// Comments returns the list of comment groups in the comment map.
+// The result is sorted in source order.
+//
+func (cmap CommentMap) Comments() []*CommentGroup {
+ list := make([]*CommentGroup, 0, len(cmap))
+ for _, e := range cmap {
+ list = append(list, e...)
+ }
+ sortComments(list)
+ return list
+}
+
+func summary(list []*CommentGroup) string {
+ const maxLen = 40
+ var buf bytes.Buffer
+
+ // collect comments text
+loop:
+ for _, group := range list {
+ // Note: CommentGroup.Text() does too much work for what we
+ // need and would only replace this innermost loop.
+ // Just do it explicitly.
+ for _, comment := range group.List {
+ if buf.Len() >= maxLen {
+ break loop
+ }
+ buf.WriteString(comment.Text)
+ }
+ }
+
+ // truncate if too long
+ if buf.Len() > maxLen {
+ buf.Truncate(maxLen - 3)
+ buf.WriteString("...")
+ }
+
+ // replace any invisibles with blanks
+ bytes := buf.Bytes()
+ for i, b := range bytes {
+ switch b {
+ case '\t', '\n', '\r':
+ bytes[i] = ' '
+ }
+ }
+
+ return string(bytes)
+}
+
+func (cmap CommentMap) String() string {
+ var buf bytes.Buffer
+ fmt.Fprintln(&buf, "CommentMap {")
+ for node, comment := range cmap {
+ // print name of identifiers; print node type for other nodes
+ var s string
+ if ident, ok := node.(*Ident); ok {
+ s = ident.Name
+ } else {
+ s = fmt.Sprintf("%T", node)
+ }
+ fmt.Fprintf(&buf, "\t%p %20s: %s\n", node, s, summary(comment))
+ }
+ fmt.Fprintln(&buf, "}")
+ return buf.String()
+}
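
The doc comment on NewCommentMap above spells out the association rules. A minimal usage sketch, not part of this patch and with an invented sample source, that builds a map for a parsed file and prints the comments attached to each node:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

func main() {
	const src = `package p

// Answer is the answer.
const Answer = 42 // trailing comment
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	// Associate every comment group in f with a node, per the rules above.
	cmap := ast.NewCommentMap(fset, f, f.Comments)
	for node, groups := range cmap { // map iteration order is not fixed
		var text string
		for _, g := range groups {
			text += g.Text()
		}
		fmt.Printf("line %2d %T: %q\n",
			fset.Position(node.Pos()).Line, node, text)
	}
}
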
diff --git a/src/pkg/go/ast/commentmap_test.go b/src/pkg/go/ast/commentmap_test.go
new file mode 100644
index 000000000..e372eab74
--- /dev/null
+++ b/src/pkg/go/ast/commentmap_test.go
@@ -0,0 +1,143 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// To avoid a cyclic dependency with go/parser, this file is in a separate package.
+
+package ast_test
+
+import (
+ "bytes"
+ "fmt"
+ . "go/ast"
+ "go/parser"
+ "go/token"
+ "sort"
+ "testing"
+)
+
+const src = `
+// the very first comment
+
+// package p
+package p /* the name is p */
+
+// imports
+import (
+ "bytes" // bytes
+ "fmt" // fmt
+ "go/ast"
+ "go/parser"
+)
+
+// T
+type T struct {
+ a, b, c int // associated with a, b, c
+ // associated with x, y
+ x, y float64 // float values
+ z complex128 // complex value
+}
+// also associated with T
+
+// x
+var x = 0 // x = 0
+// also associated with x
+
+// f1
+func f1() {
+ /* associated with s1 */
+ s1()
+ // also associated with s1
+
+ // associated with s2
+
+ // also associated with s2
+ s2() // line comment for s2
+}
+// associated with f1
+// also associated with f1
+
+// associated with f2
+
+// f2
+func f2() {
+}
+
+func f3() {
+ i := 1 /* 1 */ + 2 // addition
+ _ = i
+}
+
+// the very last comment
+`
+
+// res maps a key of the form "line number: node type"
+// to the associated comments' text.
+//
+var res = map[string]string{
+ " 5: *ast.File": "the very first comment\npackage p\n",
+ " 5: *ast.Ident": " the name is p\n",
+ " 8: *ast.GenDecl": "imports\n",
+ " 9: *ast.ImportSpec": "bytes\n",
+ "10: *ast.ImportSpec": "fmt\n",
+ "16: *ast.GenDecl": "T\nalso associated with T\n",
+ "17: *ast.Field": "associated with a, b, c\n",
+ "19: *ast.Field": "associated with x, y\nfloat values\n",
+ "20: *ast.Field": "complex value\n",
+ "25: *ast.GenDecl": "x\nx = 0\nalso associated with x\n",
+ "29: *ast.FuncDecl": "f1\nassociated with f1\nalso associated with f1\n",
+ "31: *ast.ExprStmt": " associated with s1\nalso associated with s1\n",
+ "37: *ast.ExprStmt": "associated with s2\nalso associated with s2\nline comment for s2\n",
+ "45: *ast.FuncDecl": "associated with f2\nf2\n",
+ "49: *ast.AssignStmt": "addition\n",
+ "49: *ast.BasicLit": " 1\n",
+ "50: *ast.Ident": "the very last comment\n",
+}
+
+func ctext(list []*CommentGroup) string {
+ var buf bytes.Buffer
+ for _, g := range list {
+ buf.WriteString(g.Text())
+ }
+ return buf.String()
+}
+
+func TestCommentMap(t *testing.T) {
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
+ if err != nil {
+ t.Fatal(err)
+ }
+ cmap := NewCommentMap(fset, f, f.Comments)
+
+ // verify correct association of comments
+ for n, list := range cmap {
+ key := fmt.Sprintf("%2d: %T", fset.Position(n.Pos()).Line, n)
+ got := ctext(list)
+ want := res[key]
+ if got != want {
+ t.Errorf("%s: got %q; want %q", key, got, want)
+ }
+ }
+
+ // verify that no comments got lost
+ if n := len(cmap.Comments()); n != len(f.Comments) {
+ t.Errorf("got %d comment groups in map; want %d", n, len(f.Comments))
+ }
+
+ // support code to update test:
+ // set genMap to true to generate res map
+ const genMap = false
+ if genMap {
+ out := make([]string, 0, len(cmap))
+ for n, list := range cmap {
+ out = append(out, fmt.Sprintf("\t\"%2d: %T\":\t%q,", fset.Position(n.Pos()).Line, n, ctext(list)))
+ }
+ sort.Strings(out)
+ for _, s := range out {
+ fmt.Println(s)
+ }
+ }
+}
+
+// TODO(gri): add tests for Filter.
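
A hedged sketch of what the missing Filter test might look like (hypothetical, not part of the patch); it reuses src and ctext from the file above and only checks that every surviving entry lies inside the filtered subtree:

func TestCommentMapFilter(t *testing.T) {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
	if err != nil {
		t.Fatal(err)
	}
	cmap := NewCommentMap(fset, f, f.Comments)

	// Restrict the map to the first declaration (the import block).
	decl := f.Decls[0]
	fmap := cmap.Filter(decl)
	if len(fmap) == 0 {
		t.Error("filtered map is empty; expected the import comments to survive")
	}
	for n, list := range fmap {
		// every surviving entry must belong to a node inside decl
		if n.Pos() < decl.Pos() || n.End() > decl.End() {
			t.Errorf("%T %q lies outside the filtered declaration", n, ctext(list))
		}
	}
}
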
diff --git a/src/pkg/go/ast/filter.go b/src/pkg/go/ast/filter.go
index 4a89b8909..4db5814cb 100644
--- a/src/pkg/go/ast/filter.go
+++ b/src/pkg/go/ast/filter.go
@@ -414,7 +414,7 @@ func MergePackageFiles(pkg *Package, mode MergeMode) *File {
if path := imp.Path.Value; !seen[path] {
// TODO: consider handling cases where:
// - 2 imports exist with the same import path but
- // have different local names (one should probably
+ // have different local names (one should probably
// keep both of them)
// - 2 imports exist but only one has a comment
// - 2 imports exist and they both have (possibly
diff --git a/src/pkg/go/ast/import.go b/src/pkg/go/ast/import.go
index 2d4f69aae..a68a4840f 100644
--- a/src/pkg/go/ast/import.go
+++ b/src/pkg/go/ast/import.go
@@ -20,7 +20,7 @@ func SortImports(fset *token.FileSet, f *File) {
break
}
- if d.Lparen == token.NoPos {
+ if !d.Lparen.IsValid() {
// Not a block: sorted by default.
continue
}
diff --git a/src/pkg/go/ast/print.go b/src/pkg/go/ast/print.go
index 02cf9e022..4a1ce480f 100644
--- a/src/pkg/go/ast/print.go
+++ b/src/pkg/go/ast/print.go
@@ -34,7 +34,8 @@ func NotNilFilter(_ string, v reflect.Value) bool {
//
// A non-nil FieldFilter f may be provided to control the output:
// struct fields for which f(fieldname, fieldvalue) is true
-// are printed; all others are filtered from the output.
+// are printed; all others are filtered from the output. Unexported
+// struct fields are never printed.
//
func Fprint(w io.Writer, fset *token.FileSet, x interface{}, f FieldFilter) (err error) {
// setup printer
@@ -107,8 +108,10 @@ func (p *printer) Write(data []byte) (n int, err error) {
}
p.last = b
}
- m, err = p.output.Write(data[n:])
- n += m
+ if len(data) > n {
+ m, err = p.output.Write(data[n:])
+ n += m
+ }
return
}
@@ -145,15 +148,18 @@ func (p *printer) print(x reflect.Value) {
p.print(x.Elem())
case reflect.Map:
- p.printf("%s (len = %d) {\n", x.Type(), x.Len())
- p.indent++
- for _, key := range x.MapKeys() {
- p.print(key)
- p.printf(": ")
- p.print(x.MapIndex(key))
+ p.printf("%s (len = %d) {", x.Type(), x.Len())
+ if x.Len() > 0 {
+ p.indent++
p.printf("\n")
+ for _, key := range x.MapKeys() {
+ p.print(key)
+ p.printf(": ")
+ p.print(x.MapIndex(key))
+ p.printf("\n")
+ }
+ p.indent--
}
- p.indent--
p.printf("}")
case reflect.Ptr:
@@ -169,32 +175,57 @@ func (p *printer) print(x reflect.Value) {
p.print(x.Elem())
}
+ case reflect.Array:
+ p.printf("%s {", x.Type())
+ if x.Len() > 0 {
+ p.indent++
+ p.printf("\n")
+ for i, n := 0, x.Len(); i < n; i++ {
+ p.printf("%d: ", i)
+ p.print(x.Index(i))
+ p.printf("\n")
+ }
+ p.indent--
+ }
+ p.printf("}")
+
case reflect.Slice:
if s, ok := x.Interface().([]byte); ok {
p.printf("%#q", s)
return
}
- p.printf("%s (len = %d) {\n", x.Type(), x.Len())
- p.indent++
- for i, n := 0, x.Len(); i < n; i++ {
- p.printf("%d: ", i)
- p.print(x.Index(i))
+ p.printf("%s (len = %d) {", x.Type(), x.Len())
+ if x.Len() > 0 {
+ p.indent++
p.printf("\n")
+ for i, n := 0, x.Len(); i < n; i++ {
+ p.printf("%d: ", i)
+ p.print(x.Index(i))
+ p.printf("\n")
+ }
+ p.indent--
}
- p.indent--
p.printf("}")
case reflect.Struct:
- p.printf("%s {\n", x.Type())
- p.indent++
t := x.Type()
+ p.printf("%s {", t)
+ p.indent++
+ first := true
for i, n := 0, t.NumField(); i < n; i++ {
- name := t.Field(i).Name
- value := x.Field(i)
- if p.filter == nil || p.filter(name, value) {
- p.printf("%s: ", name)
- p.print(value)
- p.printf("\n")
+ // exclude non-exported fields because their
+ // values cannot be accessed via reflection
+ if name := t.Field(i).Name; IsExported(name) {
+ value := x.Field(i)
+ if p.filter == nil || p.filter(name, value) {
+ if first {
+ p.printf("\n")
+ first = false
+ }
+ p.printf("%s: ", name)
+ p.print(value)
+ p.printf("\n")
+ }
}
}
p.indent--
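
Two visible effects of the print.go changes: empty maps, arrays, slices, and structs now print on a single line, and unexported struct fields are skipped because their values cannot be read via reflection. A small sketch, not part of this patch, exercising both through the exported Fprint:

package main

import (
	"go/ast"
	"os"
)

type pair struct {
	X int
	y int // unexported: never printed
}

func main() {
	// The empty slice prints on a single line, matching the new
	// "[]int (len = 0) {}" test case below.
	ast.Fprint(os.Stdout, nil, []int{}, nil)
	// Only the exported field X appears; y is silently omitted.
	ast.Fprint(os.Stdout, nil, pair{X: 1, y: 2}, nil)
}
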
diff --git a/src/pkg/go/ast/print_test.go b/src/pkg/go/ast/print_test.go
index 71c028e75..210f16430 100644
--- a/src/pkg/go/ast/print_test.go
+++ b/src/pkg/go/ast/print_test.go
@@ -23,6 +23,7 @@ var tests = []struct {
{"foobar", "0 \"foobar\""},
// maps
+ {map[Expr]string{}, `0 map[ast.Expr]string (len = 0) {}`},
{map[string]int{"a": 1},
`0 map[string]int (len = 1) {
1 . "a": 1
@@ -31,7 +32,21 @@ var tests = []struct {
// pointers
{new(int), "0 *0"},
+ // arrays
+ {[0]int{}, `0 [0]int {}`},
+ {[3]int{1, 2, 3},
+ `0 [3]int {
+ 1 . 0: 1
+ 2 . 1: 2
+ 3 . 2: 3
+ 4 }`},
+ {[...]int{42},
+ `0 [1]int {
+ 1 . 0: 42
+ 2 }`},
+
// slices
+ {[]int{}, `0 []int (len = 0) {}`},
{[]int{1, 2, 3},
`0 []int (len = 3) {
1 . 0: 1
@@ -40,6 +55,12 @@ var tests = []struct {
4 }`},
// structs
+ {struct{}{}, `0 struct {} {}`},
+ {struct{ x int }{007}, `0 struct { x int } {}`},
+ {struct{ X, y int }{42, 991},
+ `0 struct { X int; y int } {
+ 1 . X: 42
+ 2 }`},
{struct{ X, Y int }{42, 991},
`0 struct { X int; Y int } {
1 . X: 42
diff --git a/src/pkg/go/ast/resolve.go b/src/pkg/go/ast/resolve.go
index 908e61c5d..0406bfc58 100644
--- a/src/pkg/go/ast/resolve.go
+++ b/src/pkg/go/ast/resolve.go
@@ -57,7 +57,7 @@ func resolve(scope *Scope, ident *Ident) bool {
// An Importer must determine the canonical import path and
// check the map to see if it is already present in the imports map.
// If so, the Importer can return the map entry. Otherwise, the
-// Importer should load the package data for the given path into
+// Importer should load the package data for the given path into
// a new *Object (pkg), record pkg in the imports map, and then
// return pkg.
type Importer func(imports map[string]*Object, path string) (pkg *Object, err error)
@@ -136,7 +136,7 @@ func NewPackage(fset *token.FileSet, files map[string]*File, importer Importer,
for _, obj := range pkg.Data.(*Scope).Objects {
p.declare(fileScope, pkgScope, obj)
}
- } else {
+ } else if name != "_" {
// declare imported package object in file scope
// (do not re-use pkg in the file scope but create
// a new object instead; the Decl field is different
diff --git a/src/pkg/go/ast/scope.go b/src/pkg/go/ast/scope.go
index 11e6b13f1..8df5b2c65 100644
--- a/src/pkg/go/ast/scope.go
+++ b/src/pkg/go/ast/scope.go
@@ -64,18 +64,16 @@ func (s *Scope) String() string {
// ----------------------------------------------------------------------------
// Objects
-// TODO(gri) Consider replacing the Object struct with an interface
-// and a corresponding set of object implementations.
-
// An Object describes a named language entity such as a package,
// constant, type, variable, function (incl. methods), or label.
//
// The Data field contains object-specific data:
//
-// Kind Data type Data value
-// Pkg *Scope package scope
-// Con int iota for the respective declaration
-// Con != nil constant value
+// Kind Data type Data value
+// Pkg *types.Package package scope
+// Con int iota for the respective declaration
+// Con != nil constant value
+// Typ *Scope (used as method scope during type checking - transient)
//
type Object struct {
Kind ObjKind
@@ -137,7 +135,7 @@ func (obj *Object) Pos() token.Pos {
return token.NoPos
}
-// ObKind describes what an object represents.
+// ObjKind describes what an object represents.
type ObjKind int
// The list of possible Object kinds.
diff --git a/src/pkg/go/ast/walk.go b/src/pkg/go/ast/walk.go
index 181cfd149..fef2503c3 100644
--- a/src/pkg/go/ast/walk.go
+++ b/src/pkg/go/ast/walk.go
@@ -158,7 +158,9 @@ func Walk(v Visitor, node Node) {
Walk(v, n.Fields)
case *FuncType:
- Walk(v, n.Params)
+ if n.Params != nil {
+ Walk(v, n.Params)
+ }
if n.Results != nil {
Walk(v, n.Results)
}
@@ -344,9 +346,6 @@ func Walk(v Visitor, node Node) {
}
Walk(v, n.Name)
walkDeclList(v, n.Decls)
- for _, g := range n.Comments {
- Walk(v, g)
- }
// don't walk n.Comments - they have been
// visited already through the individual
// nodes
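
With this change a walk no longer traverses File.Comments as a flat list; only the comment groups reachable through individual nodes (e.g. a declaration's Doc comment) are visited. A minimal sketch, not part of this patch, that counts the groups a walk actually reaches versus those recorded in File.Comments:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

func main() {
	const src = `package p

// doc comment for f
func f() {} // trailing comment
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	groups := 0
	ast.Inspect(f, func(n ast.Node) bool {
		if _, ok := n.(*ast.CommentGroup); ok {
			groups++
		}
		return true
	})
	// Only groups attached to nodes are reached (here: the doc comment);
	// the trailing comment is recorded only in f.Comments.
	fmt.Printf("walked %d comment group(s); file has %d\n", groups, len(f.Comments))
}
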