path: root/src/pkg/go/types
author    Ondřej Surý <ondrej@sury.org>    2011-09-13 13:11:55 +0200
committer Ondřej Surý <ondrej@sury.org>    2011-09-13 13:11:55 +0200
commit    80f18fc933cf3f3e829c5455a1023d69f7b86e52 (patch)
tree      4b825dc642cb6eb9a060e54bf8d69288fbee4904 /src/pkg/go/types
parent    28592ee1ea1f5cdffcf85472f9de0285d928cf12 (diff)
download  golang-80f18fc933cf3f3e829c5455a1023d69f7b86e52.tar.gz
Imported Upstream version 60
Diffstat (limited to 'src/pkg/go/types')
-rw-r--r--  src/pkg/go/types/Makefile                16
-rw-r--r--  src/pkg/go/types/check.go               233
-rw-r--r--  src/pkg/go/types/check_test.go          224
-rw-r--r--  src/pkg/go/types/const.go               347
-rw-r--r--  src/pkg/go/types/exportdata.go          135
-rw-r--r--  src/pkg/go/types/gcimporter.go          838
-rw-r--r--  src/pkg/go/types/gcimporter_test.go     106
-rw-r--r--  src/pkg/go/types/testdata/exports.go     89
-rw-r--r--  src/pkg/go/types/testdata/test0.src     154
-rw-r--r--  src/pkg/go/types/types.go               273
-rw-r--r--  src/pkg/go/types/universe.go            115
11 files changed, 0 insertions, 2530 deletions
diff --git a/src/pkg/go/types/Makefile b/src/pkg/go/types/Makefile
deleted file mode 100644
index 4ca707c73..000000000
--- a/src/pkg/go/types/Makefile
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright 2010 The Go Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file.
-
-include ../../../Make.inc
-
-TARG=go/types
-GOFILES=\
- check.go\
- const.go\
- exportdata.go\
- gcimporter.go\
- types.go\
- universe.go\
-
-include ../../../Make.pkg
diff --git a/src/pkg/go/types/check.go b/src/pkg/go/types/check.go
deleted file mode 100644
index 02d662926..000000000
--- a/src/pkg/go/types/check.go
+++ /dev/null
@@ -1,233 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file implements the Check function, which typechecks a package.
-
-package types
-
-import (
- "fmt"
- "go/ast"
- "go/scanner"
- "go/token"
- "os"
- "strconv"
-)
-
-
-const debug = false
-
-
-type checker struct {
- fset *token.FileSet
- scanner.ErrorVector
- types map[ast.Expr]Type
-}
-
-
-func (c *checker) errorf(pos token.Pos, format string, args ...interface{}) string {
- msg := fmt.Sprintf(format, args...)
- c.Error(c.fset.Position(pos), msg)
- return msg
-}
-
-
-// collectFields collects struct fields (tok = token.STRUCT), interface methods
-// (tok = token.INTERFACE), and function arguments/results (tok = token.FUNC).
-func (c *checker) collectFields(tok token.Token, list *ast.FieldList, cycleOk bool) (fields ObjList, tags []string, isVariadic bool) {
- if list != nil {
- for _, field := range list.List {
- ftype := field.Type
- if t, ok := ftype.(*ast.Ellipsis); ok {
- ftype = t.Elt
- isVariadic = true
- }
- typ := c.makeType(ftype, cycleOk)
- tag := ""
- if field.Tag != nil {
- assert(field.Tag.Kind == token.STRING)
- tag, _ = strconv.Unquote(field.Tag.Value)
- }
- if len(field.Names) > 0 {
- // named fields
- for _, name := range field.Names {
- obj := name.Obj
- obj.Type = typ
- fields = append(fields, obj)
- if tok == token.STRUCT {
- tags = append(tags, tag)
- }
- }
- } else {
- // anonymous field
- switch tok {
- case token.STRUCT:
- tags = append(tags, tag)
- fallthrough
- case token.FUNC:
- obj := ast.NewObj(ast.Var, "")
- obj.Type = typ
- fields = append(fields, obj)
- case token.INTERFACE:
- utyp := Underlying(typ)
- if typ, ok := utyp.(*Interface); ok {
- // TODO(gri) This is not good enough. Check for double declarations!
- fields = append(fields, typ.Methods...)
- } else if _, ok := utyp.(*Bad); !ok {
- // if utyp is Bad, don't complain (the root cause was reported before)
- c.errorf(ftype.Pos(), "interface contains embedded non-interface type")
- }
- default:
- panic("unreachable")
- }
- }
- }
- }
- return
-}
-
-
-// makeType makes a new type for an AST type specification x or returns
-// the type referred to by a type name x. If cycleOk is set, a type may
-// refer to itself directly or indirectly; otherwise cycles are errors.
-//
-func (c *checker) makeType(x ast.Expr, cycleOk bool) (typ Type) {
- if debug {
- fmt.Printf("makeType (cycleOk = %v)\n", cycleOk)
- ast.Print(c.fset, x)
- defer func() {
- fmt.Printf("-> %T %v\n\n", typ, typ)
- }()
- }
-
- switch t := x.(type) {
- case *ast.BadExpr:
- return &Bad{}
-
- case *ast.Ident:
- // type name
- obj := t.Obj
- if obj == nil {
- // unresolved identifier (error has been reported before)
- return &Bad{Msg: "unresolved identifier"}
- }
- if obj.Kind != ast.Typ {
- msg := c.errorf(t.Pos(), "%s is not a type", t.Name)
- return &Bad{Msg: msg}
- }
- c.checkObj(obj, cycleOk)
- if !cycleOk && obj.Type.(*Name).Underlying == nil {
- // TODO(gri) Enable this message again once its position
- // is independent of the underlying map implementation.
- // msg := c.errorf(obj.Pos(), "illegal cycle in declaration of %s", obj.Name)
- msg := "illegal cycle"
- return &Bad{Msg: msg}
- }
- return obj.Type.(Type)
-
- case *ast.ParenExpr:
- return c.makeType(t.X, cycleOk)
-
- case *ast.SelectorExpr:
- // qualified identifier
- // TODO (gri) eventually, this code belongs to expression
- // type checking - here for the time being
- if ident, ok := t.X.(*ast.Ident); ok {
- if obj := ident.Obj; obj != nil {
- if obj.Kind != ast.Pkg {
- msg := c.errorf(ident.Pos(), "%s is not a package", obj.Name)
- return &Bad{Msg: msg}
- }
- // TODO(gri) we have a package name but don't
- // have the mapping from package name to package
- // scope anymore (created in ast.NewPackage).
- return &Bad{} // for now
- }
- }
- // TODO(gri) can this really happen (the parser should have excluded this)?
- msg := c.errorf(t.Pos(), "expected qualified identifier")
- return &Bad{Msg: msg}
-
- case *ast.StarExpr:
- return &Pointer{Base: c.makeType(t.X, true)}
-
- case *ast.ArrayType:
- if t.Len != nil {
- // TODO(gri) compute length
- return &Array{Elt: c.makeType(t.Elt, cycleOk)}
- }
- return &Slice{Elt: c.makeType(t.Elt, true)}
-
- case *ast.StructType:
- fields, tags, _ := c.collectFields(token.STRUCT, t.Fields, cycleOk)
- return &Struct{Fields: fields, Tags: tags}
-
- case *ast.FuncType:
- params, _, _ := c.collectFields(token.FUNC, t.Params, true)
- results, _, isVariadic := c.collectFields(token.FUNC, t.Results, true)
- return &Func{Recv: nil, Params: params, Results: results, IsVariadic: isVariadic}
-
- case *ast.InterfaceType:
- methods, _, _ := c.collectFields(token.INTERFACE, t.Methods, cycleOk)
- methods.Sort()
- return &Interface{Methods: methods}
-
- case *ast.MapType:
- return &Map{Key: c.makeType(t.Key, true), Elt: c.makeType(t.Value, true)}
-
- case *ast.ChanType:
- return &Chan{Dir: t.Dir, Elt: c.makeType(t.Value, true)}
- }
-
- panic(fmt.Sprintf("unreachable (%T)", x))
-}
-
-
-// checkObj type checks an object.
-func (c *checker) checkObj(obj *ast.Object, ref bool) {
- if obj.Type != nil {
- // object has already been type checked
- return
- }
-
- switch obj.Kind {
- case ast.Bad:
- // ignore
-
- case ast.Con:
- // TODO(gri) complete this
-
- case ast.Typ:
- typ := &Name{Obj: obj}
- obj.Type = typ // "mark" object so recursion terminates
- typ.Underlying = Underlying(c.makeType(obj.Decl.(*ast.TypeSpec).Type, ref))
-
- case ast.Var:
- // TODO(gri) complete this
-
- case ast.Fun:
- // TODO(gri) complete this
-
- default:
- panic("unreachable")
- }
-}
-
-
-// Check typechecks a package.
-// It augments the AST by assigning types to all ast.Objects and returns a map
-// of types for all expression nodes in statements, and a scanner.ErrorList if
-// there are errors.
-//
-func Check(fset *token.FileSet, pkg *ast.Package) (types map[ast.Expr]Type, err os.Error) {
- var c checker
- c.fset = fset
- c.types = make(map[ast.Expr]Type)
-
- for _, obj := range pkg.Scope.Objects {
- c.checkObj(obj, false)
- }
-
- return c.types, c.GetError(scanner.NoMultiples)
-}
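For orientation, the deleted check.go above is driven after parsing and identifier resolution. The following is a minimal, hypothetical driver written against the pre-Go 1 (release r60) API shown in this diff; the file name and source string are invented for the example, and GcImporter/Universe are the helpers defined elsewhere in this package.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
)

func main() {
	fset := token.NewFileSet()
	// File name and source are made up for the example.
	file, err := parser.ParseFile(fset, "example.go", "package p\nvar x = 1 + 2\n", 0)
	if err != nil {
		panic(err)
	}
	files := map[string]*ast.File{"example.go": file}

	// Resolve identifiers across files; GcImporter and Universe come from go/types.
	pkg, err := ast.NewPackage(fset, files, types.GcImporter, types.Universe)
	if err != nil {
		fmt.Println("resolve:", err)
	}

	// Check augments the AST with types and returns a map of expression types.
	exprTypes, err := types.Check(fset, pkg)
	if err != nil {
		fmt.Println("typecheck:", err)
	}
	fmt.Println("typed expressions:", len(exprTypes))
}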
diff --git a/src/pkg/go/types/check_test.go b/src/pkg/go/types/check_test.go
deleted file mode 100644
index 6ecb12b1e..000000000
--- a/src/pkg/go/types/check_test.go
+++ /dev/null
@@ -1,224 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file implements a typechecker test harness. The packages specified
-// in tests are typechecked. Error messages reported by the typechecker are
-// compared against the error messages expected in the test files.
-//
-// Expected errors are indicated in the test files by putting a comment
-// of the form /* ERROR "rx" */ immediately following an offending token.
-// The harness will verify that an error matching the regular expression
-// rx is reported at that source position. Consecutive comments may be
-// used to indicate multiple errors for the same token position.
-//
-// For instance, the following test file indicates that a "not declared"
-// error should be reported for the undeclared variable x:
-//
-// package p
-// func f() {
-// _ = x /* ERROR "not declared" */ + 1
-// }
-
-package types
-
-import (
- "fmt"
- "go/ast"
- "go/parser"
- "go/scanner"
- "go/token"
- "io/ioutil"
- "os"
- "regexp"
- "testing"
-)
-
-
-// The test filenames do not end in .go so that they are invisible
-// to gofmt since they contain comments that must not change their
-// positions relative to surrounding tokens.
-
-var tests = []struct {
- name string
- files []string
-}{
- {"test0", []string{"testdata/test0.src"}},
-}
-
-
-var fset = token.NewFileSet()
-
-
-// TODO(gri) This functionality should be in token.FileSet.
-func getFile(filename string) *token.File {
- for f := range fset.Files() {
- if f.Name() == filename {
- return f
- }
- }
- return nil
-}
-
-
-// TODO(gri) This functionality should be in token.FileSet.
-func getPos(filename string, offset int) token.Pos {
- if f := getFile(filename); f != nil {
- return f.Pos(offset)
- }
- return token.NoPos
-}
-
-
-// TODO(gri) Need to revisit parser interface. We should be able to use parser.ParseFiles
-// or a similar function instead.
-func parseFiles(t *testing.T, testname string, filenames []string) (map[string]*ast.File, os.Error) {
- files := make(map[string]*ast.File)
- var errors scanner.ErrorList
- for _, filename := range filenames {
- if _, exists := files[filename]; exists {
- t.Fatalf("%s: duplicate file %s", testname, filename)
- }
- file, err := parser.ParseFile(fset, filename, nil, parser.DeclarationErrors)
- if file == nil {
- t.Fatalf("%s: could not parse file %s", testname, filename)
- }
- files[filename] = file
- if err != nil {
- // if the parser returns a non-scanner.ErrorList error
- // the file couldn't be read in the first place and
- // file == nil; in that case we shouldn't reach here
- errors = append(errors, err.(scanner.ErrorList)...)
- }
-
- }
- return files, errors
-}
-
-
-// ERROR comments must be of the form /* ERROR "rx" */ and rx is
-// a regular expression that matches the expected error message.
-//
-var errRx = regexp.MustCompile(`^/\* *ERROR *"([^"]*)" *\*/$`)
-
-// expectedErrors collects the regular expressions of ERROR comments found
-// in files and returns them as a map of error positions to error messages.
-//
-func expectedErrors(t *testing.T, testname string, files map[string]*ast.File) map[token.Pos]string {
- errors := make(map[token.Pos]string)
- for filename := range files {
- src, err := ioutil.ReadFile(filename)
- if err != nil {
- t.Fatalf("%s: could not read %s", testname, filename)
- }
-
- var s scanner.Scanner
- // file was parsed already - do not add it again to the file
- // set otherwise the position information returned here will
- // not match the position information collected by the parser
- s.Init(getFile(filename), src, nil, scanner.ScanComments)
- var prev token.Pos // position of last non-comment token
-
- scanFile:
- for {
- pos, tok, lit := s.Scan()
- switch tok {
- case token.EOF:
- break scanFile
- case token.COMMENT:
- s := errRx.FindStringSubmatch(lit)
- if len(s) == 2 {
- errors[prev] = string(s[1])
- }
- default:
- prev = pos
- }
- }
- }
- return errors
-}
-
-
-func eliminate(t *testing.T, expected map[token.Pos]string, errors os.Error) {
- if errors == nil {
- return
- }
- for _, error := range errors.(scanner.ErrorList) {
- // error.Pos is a token.Position, but we want
- // a token.Pos so we can do a map lookup
- // TODO(gri) Need to move scanner.Errors over
- // to use token.Pos and file set info.
- pos := getPos(error.Pos.Filename, error.Pos.Offset)
- if msg, found := expected[pos]; found {
- // we expect a message at pos; check if it matches
- rx, err := regexp.Compile(msg)
- if err != nil {
- t.Errorf("%s: %v", error.Pos, err)
- continue
- }
- if match := rx.MatchString(error.Msg); !match {
- t.Errorf("%s: %q does not match %q", error.Pos, error.Msg, msg)
- continue
- }
- // we have a match - eliminate this error
- expected[pos] = "", false
- } else {
- // To keep in mind when analyzing failed test output:
- // If the same error position occurs multiple times in errors,
- // this message will be triggered (because the first error at
- // the position removes this position from the expected errors).
- t.Errorf("%s: no (multiple?) error expected, but found: %s", error.Pos, error.Msg)
- }
- }
-}
-
-
-func check(t *testing.T, testname string, testfiles []string) {
- // TODO(gri) Eventually all these different phases should be
- // subsumed into a single function call that takes
- // a set of files and creates a fully resolved and
- // type-checked AST.
-
- files, err := parseFiles(t, testname, testfiles)
-
- // we are expecting the following errors
- // (collect these after parsing the files so that
- // they are found in the file set)
- errors := expectedErrors(t, testname, files)
-
- // verify errors returned by the parser
- eliminate(t, errors, err)
-
- // verify errors returned after resolving identifiers
- pkg, err := ast.NewPackage(fset, files, GcImporter, Universe)
- eliminate(t, errors, err)
-
- // verify errors returned by the typechecker
- _, err = Check(fset, pkg)
- eliminate(t, errors, err)
-
- // there should be no expected errors left
- if len(errors) > 0 {
- t.Errorf("%s: %d errors not reported:", testname, len(errors))
- for pos, msg := range errors {
- t.Errorf("%s: %s\n", fset.Position(pos), msg)
- }
- }
-}
-
-
-func TestCheck(t *testing.T) {
- // For easy debugging w/o changing the testing code,
- // if there is a local test file, only test that file.
- const testfile = "test.go"
- if fi, err := os.Stat(testfile); err == nil && fi.IsRegular() {
- fmt.Printf("WARNING: Testing only %s (remove it to run all tests)\n", testfile)
- check(t, testfile, []string{testfile})
- return
- }
-
- // Otherwise, run all the tests.
- for _, test := range tests {
- check(t, test.name, test.files)
- }
-}
diff --git a/src/pkg/go/types/const.go b/src/pkg/go/types/const.go
deleted file mode 100644
index 6fdc22f6b..000000000
--- a/src/pkg/go/types/const.go
+++ /dev/null
@@ -1,347 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file implements operations on ideal constants.
-
-package types
-
-import (
- "big"
- "go/token"
- "strconv"
-)
-
-
-// TODO(gri) Consider changing the API so Const is an interface
-// and operations on consts don't have to type switch.
-
-// A Const implements an ideal constant Value.
-// The zero value of a Const is not a valid constant value.
-type Const struct {
- // representation of constant values:
- // ideal bool -> bool
- // ideal int -> *big.Int
- // ideal float -> *big.Rat
- // ideal complex -> cmplx
- // ideal string -> string
- val interface{}
-}
-
-
-// Representation of complex values.
-type cmplx struct {
- re, im *big.Rat
-}
-
-
-func assert(cond bool) {
- if !cond {
- panic("go/types internal error: assertion failed")
- }
-}
-
-
-// MakeConst makes an ideal constant from a literal
-// token and the corresponding literal string.
-func MakeConst(tok token.Token, lit string) Const {
- switch tok {
- case token.INT:
- var x big.Int
- _, ok := x.SetString(lit, 0)
- assert(ok)
- return Const{&x}
- case token.FLOAT:
- var y big.Rat
- _, ok := y.SetString(lit)
- assert(ok)
- return Const{&y}
- case token.IMAG:
- assert(lit[len(lit)-1] == 'i')
- var im big.Rat
- _, ok := im.SetString(lit[0 : len(lit)-1])
- assert(ok)
- return Const{cmplx{big.NewRat(0, 1), &im}}
- case token.CHAR:
- assert(lit[0] == '\'' && lit[len(lit)-1] == '\'')
- code, _, _, err := strconv.UnquoteChar(lit[1:len(lit)-1], '\'')
- assert(err == nil)
- return Const{big.NewInt(int64(code))}
- case token.STRING:
- s, err := strconv.Unquote(lit)
- assert(err == nil)
- return Const{s}
- }
- panic("unreachable")
-}
-
-
-// MakeZero returns the zero constant for the given type.
-func MakeZero(typ *Type) Const {
- // TODO(gri) fix this
- return Const{0}
-}
-
-
-// Match attempts to match the internal constant representations of x and y.
-// If the attempt is successful, the result is the values of x and y,
-// if necessary converted to have the same internal representation; otherwise
-// the results are invalid.
-func (x Const) Match(y Const) (u, v Const) {
- switch a := x.val.(type) {
- case bool:
- if _, ok := y.val.(bool); ok {
- u, v = x, y
- }
- case *big.Int:
- switch y.val.(type) {
- case *big.Int:
- u, v = x, y
- case *big.Rat:
- var z big.Rat
- z.SetInt(a)
- u, v = Const{&z}, y
- case cmplx:
- var z big.Rat
- z.SetInt(a)
- u, v = Const{cmplx{&z, big.NewRat(0, 1)}}, y
- }
- case *big.Rat:
- switch y.val.(type) {
- case *big.Int:
- v, u = y.Match(x)
- case *big.Rat:
- u, v = x, y
- case cmplx:
- u, v = Const{cmplx{a, big.NewRat(0, 1)}}, y
- }
- case cmplx:
- switch y.val.(type) {
- case *big.Int, *big.Rat:
- v, u = y.Match(x)
- case cmplx:
- u, v = x, y
- }
- case string:
- if _, ok := y.val.(string); ok {
- u, v = x, y
- }
- default:
- panic("unreachable")
- }
- return
-}
-
-
-// Convert attempts to convert the constant x to a given type.
-// If the attempt is successful, the result is the new constant;
-// otherwise the result is invalid.
-func (x Const) Convert(typ *Type) Const {
- // TODO(gri) implement this
- switch x := x.val.(type) {
- case bool:
- case *big.Int:
- case *big.Rat:
- case cmplx:
- case string:
- }
- return x
-}
-
-
-func (x Const) String() string {
- switch x := x.val.(type) {
- case bool:
- if x {
- return "true"
- }
- return "false"
- case *big.Int:
- return x.String()
- case *big.Rat:
- return x.FloatString(10) // 10 digits of precision after decimal point seems fine
- case cmplx:
- // TODO(gri) don't print 0 components
- return x.re.FloatString(10) + " + " + x.im.FloatString(10) + "i"
- case string:
- return x
- }
- panic("unreachable")
-}
-
-
-func (x Const) UnaryOp(op token.Token) Const {
- panic("unimplemented")
-}
-
-
-func (x Const) BinaryOp(op token.Token, y Const) Const {
- var z interface{}
- switch x := x.val.(type) {
- case bool:
- z = binaryBoolOp(x, op, y.val.(bool))
- case *big.Int:
- z = binaryIntOp(x, op, y.val.(*big.Int))
- case *big.Rat:
- z = binaryFloatOp(x, op, y.val.(*big.Rat))
- case cmplx:
- z = binaryCmplxOp(x, op, y.val.(cmplx))
- case string:
- z = binaryStringOp(x, op, y.val.(string))
- default:
- panic("unreachable")
- }
- return Const{z}
-}
-
-
-func binaryBoolOp(x bool, op token.Token, y bool) interface{} {
- switch op {
- case token.EQL:
- return x == y
- case token.NEQ:
- return x != y
- }
- panic("unreachable")
-}
-
-
-func binaryIntOp(x *big.Int, op token.Token, y *big.Int) interface{} {
- var z big.Int
- switch op {
- case token.ADD:
- return z.Add(x, y)
- case token.SUB:
- return z.Sub(x, y)
- case token.MUL:
- return z.Mul(x, y)
- case token.QUO:
- return z.Quo(x, y)
- case token.REM:
- return z.Rem(x, y)
- case token.AND:
- return z.And(x, y)
- case token.OR:
- return z.Or(x, y)
- case token.XOR:
- return z.Xor(x, y)
- case token.AND_NOT:
- return z.AndNot(x, y)
- case token.SHL:
- panic("unimplemented")
- case token.SHR:
- panic("unimplemented")
- case token.EQL:
- return x.Cmp(y) == 0
- case token.NEQ:
- return x.Cmp(y) != 0
- case token.LSS:
- return x.Cmp(y) < 0
- case token.LEQ:
- return x.Cmp(y) <= 0
- case token.GTR:
- return x.Cmp(y) > 0
- case token.GEQ:
- return x.Cmp(y) >= 0
- }
- panic("unreachable")
-}
-
-
-func binaryFloatOp(x *big.Rat, op token.Token, y *big.Rat) interface{} {
- var z big.Rat
- switch op {
- case token.ADD:
- return z.Add(x, y)
- case token.SUB:
- return z.Sub(x, y)
- case token.MUL:
- return z.Mul(x, y)
- case token.QUO:
- return z.Quo(x, y)
- case token.EQL:
- return x.Cmp(y) == 0
- case token.NEQ:
- return x.Cmp(y) != 0
- case token.LSS:
- return x.Cmp(y) < 0
- case token.LEQ:
- return x.Cmp(y) <= 0
- case token.GTR:
- return x.Cmp(y) > 0
- case token.GEQ:
- return x.Cmp(y) >= 0
- }
- panic("unreachable")
-}
-
-
-func binaryCmplxOp(x cmplx, op token.Token, y cmplx) interface{} {
- a, b := x.re, x.im
- c, d := y.re, y.im
- switch op {
- case token.ADD:
- // (a+c) + i(b+d)
- var re, im big.Rat
- re.Add(a, c)
- im.Add(b, d)
- return cmplx{&re, &im}
- case token.SUB:
- // (a-c) + i(b-d)
- var re, im big.Rat
- re.Sub(a, c)
- im.Sub(b, d)
- return cmplx{&re, &im}
- case token.MUL:
- // (ac-bd) + i(bc+ad)
- var ac, bd, bc, ad big.Rat
- ac.Mul(a, c)
- bd.Mul(b, d)
- bc.Mul(b, c)
- ad.Mul(a, d)
- var re, im big.Rat
- re.Sub(&ac, &bd)
- im.Add(&bc, &ad)
- return cmplx{&re, &im}
- case token.QUO:
- // (ac+bd)/s + i(bc-ad)/s, with s = cc + dd
- var ac, bd, bc, ad, s big.Rat
- ac.Mul(a, c)
- bd.Mul(b, d)
- bc.Mul(b, c)
- ad.Mul(a, d)
- s.Add(c.Mul(c, c), d.Mul(d, d))
- var re, im big.Rat
- re.Add(&ac, &bd)
- re.Quo(&re, &s)
- im.Sub(&bc, &ad)
- im.Quo(&im, &s)
- return cmplx{&re, &im}
- case token.EQL:
- return a.Cmp(c) == 0 && b.Cmp(d) == 0
- case token.NEQ:
- return a.Cmp(c) != 0 || b.Cmp(d) != 0
- }
- panic("unreachable")
-}
-
-
-func binaryStringOp(x string, op token.Token, y string) interface{} {
- switch op {
- case token.ADD:
- return x + y
- case token.EQL:
- return x == y
- case token.NEQ:
- return x != y
- case token.LSS:
- return x < y
- case token.LEQ:
- return x <= y
- case token.GTR:
- return x > y
- case token.GEQ:
- return x >= y
- }
- panic("unreachable")
-}
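A small, hypothetical example of the ideal-constant API deleted above (r60 era, where the big package lived at the top of the standard library): build two constants from literal tokens, bring them to a common representation with Match, and combine them with BinaryOp.

package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	x := types.MakeConst(token.INT, "12")    // ideal int (*big.Int)
	y := types.MakeConst(token.FLOAT, "0.5") // ideal float (*big.Rat)

	// Match converts both operands to the same internal representation;
	// here the ideal int is widened to a *big.Rat.
	u, v := x.Match(y)
	sum := u.BinaryOp(token.ADD, v)
	fmt.Println(sum) // prints 12.5 with 10 digits after the decimal point
}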
diff --git a/src/pkg/go/types/exportdata.go b/src/pkg/go/types/exportdata.go
deleted file mode 100644
index f68133761..000000000
--- a/src/pkg/go/types/exportdata.go
+++ /dev/null
@@ -1,135 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file implements ExportData.
-
-package types
-
-import (
- "bufio"
- "fmt"
- "io"
- "os"
- "strconv"
- "strings"
-)
-
-
-func readGopackHeader(buf *bufio.Reader) (name string, size int, err os.Error) {
- // See $GOROOT/include/ar.h.
- hdr := make([]byte, 64+12+6+6+8+10+2)
- _, err = io.ReadFull(buf, hdr)
- if err != nil {
- return
- }
- if trace {
- fmt.Printf("header: %s", hdr)
- }
- s := strings.TrimSpace(string(hdr[64+12+6+6+8:][:10]))
- size, err = strconv.Atoi(s)
- if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' {
- err = os.NewError("invalid archive header")
- return
- }
- name = strings.TrimSpace(string(hdr[:64]))
- return
-}
-
-
-type dataReader struct {
- *bufio.Reader
- io.Closer
-}
-
-
-// ExportData returns a readCloser positioned at the beginning of the
-// export data section of the given object/archive file, or an error.
-// It is the caller's responsibility to close the readCloser.
-//
-func ExportData(filename string) (rc io.ReadCloser, err os.Error) {
- file, err := os.Open(filename)
- if err != nil {
- return
- }
-
- defer func() {
- if err != nil {
- file.Close()
- // Add file name to error.
- err = fmt.Errorf("reading export data: %s: %v", filename, err)
- }
- }()
-
- buf := bufio.NewReader(file)
-
- // Read first line to make sure this is an object file.
- line, err := buf.ReadSlice('\n')
- if err != nil {
- return
- }
- if string(line) == "!<arch>\n" {
- // Archive file. Scan to __.PKGDEF, which should
- // be second archive entry.
- var name string
- var size int
-
- // First entry should be __.SYMDEF.
- // Read and discard.
- if name, size, err = readGopackHeader(buf); err != nil {
- return
- }
- if name != "__.SYMDEF" {
- err = os.NewError("go archive does not begin with __.SYMDEF")
- return
- }
- const block = 4096
- tmp := make([]byte, block)
- for size > 0 {
- n := size
- if n > block {
- n = block
- }
- _, err = io.ReadFull(buf, tmp[:n])
- if err != nil {
- return
- }
- size -= n
- }
-
- // Second entry should be __.PKGDEF.
- if name, size, err = readGopackHeader(buf); err != nil {
- return
- }
- if name != "__.PKGDEF" {
- err = os.NewError("go archive is missing __.PKGDEF")
- return
- }
-
- // Read first line of __.PKGDEF data, so that line
- // is once again the first line of the input.
- line, err = buf.ReadSlice('\n')
- if err != nil {
- return
- }
- }
-
- // Now at __.PKGDEF in archive or still at beginning of file.
- // Either way, line should begin with "go object ".
- if !strings.HasPrefix(string(line), "go object ") {
- err = os.NewError("not a go object file")
- return
- }
-
- // Skip over object header to export data.
- // Begins after first line with $$.
- for line[0] != '$' {
- line, err = buf.ReadSlice('\n')
- if err != nil {
- return
- }
- }
-
- rc = &dataReader{buf, file}
- return
-}
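A sketch of how ExportData would be used, assuming an r60 installation where compiled packages live under $GOROOT/pkg/$GOOS_$GOARCH; the archive path below is illustrative only.

package main

import (
	"fmt"
	"go/types"
	"io/ioutil"
)

func main() {
	// Illustrative path; adjust to the local $GOROOT and platform.
	rc, err := types.ExportData("/usr/lib/go/pkg/linux_amd64/fmt.a")
	if err != nil {
		panic(err)
	}
	defer rc.Close()

	// The reader is positioned at the start of the export section;
	// ReadAll consumes everything from there to the end of the file.
	data, err := ioutil.ReadAll(rc)
	if err != nil {
		panic(err)
	}
	fmt.Printf("read %d bytes starting at the export data\n", len(data))
}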
diff --git a/src/pkg/go/types/gcimporter.go b/src/pkg/go/types/gcimporter.go
deleted file mode 100644
index aa0bb9160..000000000
--- a/src/pkg/go/types/gcimporter.go
+++ /dev/null
@@ -1,838 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file implements an ast.Importer for gc generated object files.
-// TODO(gri) Eventually move this into a separate package outside types.
-
-package types
-
-import (
- "big"
- "fmt"
- "go/ast"
- "go/token"
- "io"
- "os"
- "path/filepath"
- "runtime"
- "scanner"
- "strconv"
-)
-
-
-const trace = false // set to true for debugging
-
-var (
- pkgRoot = filepath.Join(runtime.GOROOT(), "pkg", runtime.GOOS+"_"+runtime.GOARCH)
- pkgExts = [...]string{".a", ".5", ".6", ".8"}
-)
-
-
-// findPkg returns the filename and package id for an import path.
-// If no file was found, an empty filename is returned.
-func findPkg(path string) (filename, id string) {
- if len(path) == 0 {
- return
- }
-
- id = path
- var noext string
- switch path[0] {
- default:
- // "x" -> "$GOROOT/pkg/$GOOS_$GOARCH/x.ext", "x"
- noext = filepath.Join(pkgRoot, path)
-
- case '.':
- // "./x" -> "/this/directory/x.ext", "/this/directory/x"
- cwd, err := os.Getwd()
- if err != nil {
- return
- }
- noext = filepath.Join(cwd, path)
- id = noext
-
- case '/':
- // "/x" -> "/x.ext", "/x"
- noext = path
- }
-
- // try extensions
- for _, ext := range pkgExts {
- filename = noext + ext
- if f, err := os.Stat(filename); err == nil && f.IsRegular() {
- return
- }
- }
-
- filename = "" // not found
- return
-}
-
-
-// gcParser parses the exports inside a gc compiler-produced
-// object/archive file and populates its scope with the results.
-type gcParser struct {
- scanner scanner.Scanner
- tok int // current token
- lit string // literal string; only valid for Ident, Int, String tokens
- id string // package id of imported package
- imports map[string]*ast.Object // package id -> package object
-}
-
-
-func (p *gcParser) init(filename, id string, src io.Reader, imports map[string]*ast.Object) {
- p.scanner.Init(src)
- p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) }
- p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments
- p.scanner.Whitespace = 1<<'\t' | 1<<' '
- p.scanner.Filename = filename // for good error messages
- p.next()
- p.id = id
- p.imports = imports
-}
-
-
-func (p *gcParser) next() {
- p.tok = p.scanner.Scan()
- switch p.tok {
- case scanner.Ident, scanner.Int, scanner.String:
- p.lit = p.scanner.TokenText()
- default:
- p.lit = ""
- }
- if trace {
- fmt.Printf("%s: %q -> %q\n", scanner.TokenString(p.tok), p.scanner.TokenText(), p.lit)
- }
-}
-
-
-// GcImporter implements the ast.Importer signature.
-func GcImporter(imports map[string]*ast.Object, path string) (pkg *ast.Object, err os.Error) {
- if path == "unsafe" {
- return Unsafe, nil
- }
-
- defer func() {
- if r := recover(); r != nil {
- err = r.(importError) // will re-panic if r is not an importError
- if trace {
- panic(err) // force a stack trace
- }
- }
- }()
-
- filename, id := findPkg(path)
- if filename == "" {
- err = os.NewError("can't find import: " + id)
- return
- }
-
- if pkg = imports[id]; pkg != nil {
- return // package was imported before
- }
-
- buf, err := ExportData(filename)
- if err != nil {
- return
- }
- defer buf.Close()
-
- if trace {
- fmt.Printf("importing %s (%s)\n", id, filename)
- }
-
- var p gcParser
- p.init(filename, id, buf, imports)
- pkg = p.parseExport()
- return
-}
-
-
-// ----------------------------------------------------------------------------
-// Error handling
-
-// Internal errors are boxed as importErrors.
-type importError struct {
- pos scanner.Position
- err os.Error
-}
-
-
-func (e importError) String() string {
- return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err)
-}
-
-
-func (p *gcParser) error(err interface{}) {
- if s, ok := err.(string); ok {
- err = os.NewError(s)
- }
- // panic with a runtime.Error if err is not an os.Error
- panic(importError{p.scanner.Pos(), err.(os.Error)})
-}
-
-
-func (p *gcParser) errorf(format string, args ...interface{}) {
- p.error(fmt.Sprintf(format, args...))
-}
-
-
-func (p *gcParser) expect(tok int) string {
- lit := p.lit
- if p.tok != tok {
- p.errorf("expected %q, got %q (%q)", scanner.TokenString(tok), scanner.TokenString(p.tok), lit)
- }
- p.next()
- return lit
-}
-
-
-func (p *gcParser) expectSpecial(tok string) {
- sep := 'x' // not white space
- i := 0
- for i < len(tok) && p.tok == int(tok[i]) && sep > ' ' {
- sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
- p.next()
- i++
- }
- if i < len(tok) {
- p.errorf("expected %q, got %q", tok, tok[0:i])
- }
-}
-
-
-func (p *gcParser) expectKeyword(keyword string) {
- lit := p.expect(scanner.Ident)
- if lit != keyword {
- p.errorf("expected keyword %s, got %q", keyword, lit)
- }
-}
-
-
-// ----------------------------------------------------------------------------
-// Import declarations
-
-// ImportPath = string_lit .
-//
-func (p *gcParser) parsePkgId() *ast.Object {
- id, err := strconv.Unquote(p.expect(scanner.String))
- if err != nil {
- p.error(err)
- }
-
- switch id {
- case "":
- // id == "" stands for the imported package id
- // (only known at time of package installation)
- id = p.id
- case "unsafe":
- // package unsafe is not in the imports map - handle explicitly
- return Unsafe
- }
-
- pkg := p.imports[id]
- if pkg == nil {
- scope := ast.NewScope(nil)
- pkg = ast.NewObj(ast.Pkg, "")
- pkg.Data = scope
- p.imports[id] = pkg
- }
-
- return pkg
-}
-
-
-// dotIdentifier = ( ident | '·' ) { ident | int | '·' } .
-func (p *gcParser) parseDotIdent() string {
- ident := ""
- if p.tok != scanner.Int {
- sep := 'x' // not white space
- for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' {
- ident += p.lit
- sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
- p.next()
- }
- }
- if ident == "" {
- p.expect(scanner.Ident) // use expect() for error handling
- }
- return ident
-}
-
-
-// ExportedName = ImportPath "." dotIdentifier .
-//
-func (p *gcParser) parseExportedName(kind ast.ObjKind) *ast.Object {
- pkg := p.parsePkgId()
- p.expect('.')
- name := p.parseDotIdent()
-
- // a type may have been declared before - if it exists
- // already in the respective package scope, return that
- // type
- scope := pkg.Data.(*ast.Scope)
- if kind == ast.Typ {
- if obj := scope.Lookup(name); obj != nil {
- assert(obj.Kind == ast.Typ)
- return obj
- }
- }
-
- // any other object must be a newly declared object -
- // create it and insert it into the package scope
- obj := ast.NewObj(kind, name)
- if scope.Insert(obj) != nil {
- p.errorf("already declared: %s", obj.Name)
- }
-
- // a new type object is a named type and may be referred
- // to before the underlying type is known - set it up
- if kind == ast.Typ {
- obj.Type = &Name{Obj: obj}
- }
-
- return obj
-}
-
-
-// ----------------------------------------------------------------------------
-// Types
-
-// BasicType = identifier .
-//
-func (p *gcParser) parseBasicType() Type {
- obj := Universe.Lookup(p.expect(scanner.Ident))
- if obj == nil || obj.Kind != ast.Typ {
- p.errorf("not a basic type: %s", obj.Name)
- }
- return obj.Type.(Type)
-}
-
-
-// ArrayType = "[" int_lit "]" Type .
-//
-func (p *gcParser) parseArrayType() Type {
- // "[" already consumed and lookahead known not to be "]"
- lit := p.expect(scanner.Int)
- p.expect(']')
- elt := p.parseType()
- n, err := strconv.Atoui64(lit)
- if err != nil {
- p.error(err)
- }
- return &Array{Len: n, Elt: elt}
-}
-
-
-// MapType = "map" "[" Type "]" Type .
-//
-func (p *gcParser) parseMapType() Type {
- p.expectKeyword("map")
- p.expect('[')
- key := p.parseType()
- p.expect(']')
- elt := p.parseType()
- return &Map{Key: key, Elt: elt}
-}
-
-
-// Name = identifier | "?" .
-//
-func (p *gcParser) parseName() (name string) {
- switch p.tok {
- case scanner.Ident:
- name = p.lit
- p.next()
- case '?':
- // anonymous
- p.next()
- default:
- p.error("name expected")
- }
- return
-}
-
-
-// Field = Name Type [ ":" string_lit ] .
-//
-func (p *gcParser) parseField() (fld *ast.Object, tag string) {
- name := p.parseName()
- ftyp := p.parseType()
- if name == "" {
- // anonymous field - ftyp must be T or *T and T must be a type name
- if _, ok := Deref(ftyp).(*Name); !ok {
- p.errorf("anonymous field expected")
- }
- }
- if p.tok == ':' {
- p.next()
- tag = p.expect(scanner.String)
- }
- fld = ast.NewObj(ast.Var, name)
- fld.Type = ftyp
- return
-}
-
-
-// StructType = "struct" "{" [ FieldList ] "}" .
-// FieldList = Field { ";" Field } .
-//
-func (p *gcParser) parseStructType() Type {
- var fields []*ast.Object
- var tags []string
-
- parseField := func() {
- fld, tag := p.parseField()
- fields = append(fields, fld)
- tags = append(tags, tag)
- }
-
- p.expectKeyword("struct")
- p.expect('{')
- if p.tok != '}' {
- parseField()
- for p.tok == ';' {
- p.next()
- parseField()
- }
- }
- p.expect('}')
-
- return &Struct{Fields: fields, Tags: tags}
-}
-
-
-// Parameter = ( identifier | "?" ) [ "..." ] Type [ ":" string_lit ] .
-//
-func (p *gcParser) parseParameter() (par *ast.Object, isVariadic bool) {
- name := p.parseName()
- if name == "" {
- name = "_" // cannot access unnamed identifiers
- }
- if p.tok == '.' {
- p.expectSpecial("...")
- isVariadic = true
- }
- ptyp := p.parseType()
- // ignore argument tag
- if p.tok == ':' {
- p.next()
- p.expect(scanner.String)
- }
- par = ast.NewObj(ast.Var, name)
- par.Type = ptyp
- return
-}
-
-
-// Parameters = "(" [ ParameterList ] ")" .
-// ParameterList = { Parameter "," } Parameter .
-//
-func (p *gcParser) parseParameters() (list []*ast.Object, isVariadic bool) {
- parseParameter := func() {
- par, variadic := p.parseParameter()
- list = append(list, par)
- if variadic {
- if isVariadic {
- p.error("... not on final argument")
- }
- isVariadic = true
- }
- }
-
- p.expect('(')
- if p.tok != ')' {
- parseParameter()
- for p.tok == ',' {
- p.next()
- parseParameter()
- }
- }
- p.expect(')')
-
- return
-}
-
-
-// Signature = Parameters [ Result ] .
-// Result = Type | Parameters .
-//
-func (p *gcParser) parseSignature() *Func {
- params, isVariadic := p.parseParameters()
-
- // optional result type
- var results []*ast.Object
- switch p.tok {
- case scanner.Ident, scanner.String, '[', '*', '<':
- // single, unnamed result
- result := ast.NewObj(ast.Var, "_")
- result.Type = p.parseType()
- results = []*ast.Object{result}
- case '(':
- // named or multiple result(s)
- var variadic bool
- results, variadic = p.parseParameters()
- if variadic {
- p.error("... not permitted on result type")
- }
- }
-
- return &Func{Params: params, Results: results, IsVariadic: isVariadic}
-}
-
-
-// MethodSpec = identifier Signature .
-//
-func (p *gcParser) parseMethodSpec() *ast.Object {
- if p.tok == scanner.Ident {
- p.expect(scanner.Ident)
- } else {
- // TODO(gri) should this be parseExportedName here?
- p.parsePkgId()
- p.expect('.')
- p.parseDotIdent()
- }
- p.parseSignature()
-
- // TODO(gri) compute method object
- return ast.NewObj(ast.Fun, "_")
-}
-
-
-// InterfaceType = "interface" "{" [ MethodList ] "}" .
-// MethodList = MethodSpec { ";" MethodSpec } .
-//
-func (p *gcParser) parseInterfaceType() Type {
- var methods ObjList
-
- parseMethod := func() {
- meth := p.parseMethodSpec()
- methods = append(methods, meth)
- }
-
- p.expectKeyword("interface")
- p.expect('{')
- if p.tok != '}' {
- parseMethod()
- for p.tok == ';' {
- p.next()
- parseMethod()
- }
- }
- p.expect('}')
-
- methods.Sort()
- return &Interface{Methods: methods}
-}
-
-
-// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
-//
-func (p *gcParser) parseChanType() Type {
- dir := ast.SEND | ast.RECV
- if p.tok == scanner.Ident {
- p.expectKeyword("chan")
- if p.tok == '<' {
- p.expectSpecial("<-")
- dir = ast.SEND
- }
- } else {
- p.expectSpecial("<-")
- p.expectKeyword("chan")
- dir = ast.RECV
- }
- elt := p.parseType()
- return &Chan{Dir: dir, Elt: elt}
-}
-
-
-// Type =
-// BasicType | TypeName | ArrayType | SliceType | StructType |
-// PointerType | FuncType | InterfaceType | MapType | ChanType |
-// "(" Type ")" .
-// BasicType = ident .
-// TypeName = ExportedName .
-// SliceType = "[" "]" Type .
-// PointerType = "*" Type .
-// FuncType = "func" Signature .
-//
-func (p *gcParser) parseType() Type {
- switch p.tok {
- case scanner.Ident:
- switch p.lit {
- default:
- return p.parseBasicType()
- case "struct":
- return p.parseStructType()
- case "func":
- // FuncType
- p.next()
- return p.parseSignature()
- case "interface":
- return p.parseInterfaceType()
- case "map":
- return p.parseMapType()
- case "chan":
- return p.parseChanType()
- }
- case scanner.String:
- // TypeName
- return p.parseExportedName(ast.Typ).Type.(Type)
- case '[':
- p.next() // look ahead
- if p.tok == ']' {
- // SliceType
- p.next()
- return &Slice{Elt: p.parseType()}
- }
- return p.parseArrayType()
- case '*':
- // PointerType
- p.next()
- return &Pointer{Base: p.parseType()}
- case '<':
- return p.parseChanType()
- case '(':
- // "(" Type ")"
- p.next()
- typ := p.parseType()
- p.expect(')')
- return typ
- }
- p.errorf("expected type, got %s (%q)", scanner.TokenString(p.tok), p.lit)
- return nil
-}
-
-
-// ----------------------------------------------------------------------------
-// Declarations
-
-// ImportDecl = "import" identifier string_lit .
-//
-func (p *gcParser) parseImportDecl() {
- p.expectKeyword("import")
- // The identifier has no semantic meaning in the import data.
- // It exists so that error messages can print the real package
- // name: binary.ByteOrder instead of "encoding/binary".ByteOrder.
- name := p.expect(scanner.Ident)
- pkg := p.parsePkgId()
- assert(pkg.Name == "" || pkg.Name == name)
- pkg.Name = name
-}
-
-
-// int_lit = [ "+" | "-" ] { "0" ... "9" } .
-//
-func (p *gcParser) parseInt() (sign, val string) {
- switch p.tok {
- case '-':
- p.next()
- sign = "-"
- case '+':
- p.next()
- }
- val = p.expect(scanner.Int)
- return
-}
-
-
-// number = int_lit [ "p" int_lit ] .
-//
-func (p *gcParser) parseNumber() Const {
- // mantissa
- sign, val := p.parseInt()
- mant, ok := new(big.Int).SetString(sign+val, 10)
- assert(ok)
-
- if p.lit == "p" {
- // exponent (base 2)
- p.next()
- sign, val = p.parseInt()
- exp, err := strconv.Atoui(val)
- if err != nil {
- p.error(err)
- }
- if sign == "-" {
- denom := big.NewInt(1)
- denom.Lsh(denom, exp)
- return Const{new(big.Rat).SetFrac(mant, denom)}
- }
- if exp > 0 {
- mant.Lsh(mant, exp)
- }
- return Const{new(big.Rat).SetInt(mant)}
- }
-
- return Const{mant}
-}
-
-
-// ConstDecl = "const" ExportedName [ Type ] "=" Literal .
-// Literal = bool_lit | int_lit | float_lit | complex_lit | string_lit .
-// bool_lit = "true" | "false" .
-// complex_lit = "(" float_lit "+" float_lit ")" .
-// string_lit = `"` { unicode_char } `"` .
-//
-func (p *gcParser) parseConstDecl() {
- p.expectKeyword("const")
- obj := p.parseExportedName(ast.Con)
- var x Const
- var typ Type
- if p.tok != '=' {
- obj.Type = p.parseType()
- }
- p.expect('=')
- switch p.tok {
- case scanner.Ident:
- // bool_lit
- if p.lit != "true" && p.lit != "false" {
- p.error("expected true or false")
- }
- x = Const{p.lit == "true"}
- typ = Bool.Underlying
- p.next()
- case '-', scanner.Int:
- // int_lit
- x = p.parseNumber()
- typ = Int.Underlying
- if _, ok := x.val.(*big.Rat); ok {
- typ = Float64.Underlying
- }
- case '(':
- // complex_lit
- p.next()
- re := p.parseNumber()
- p.expect('+')
- im := p.parseNumber()
- p.expect(')')
- x = Const{cmplx{re.val.(*big.Rat), im.val.(*big.Rat)}}
- typ = Complex128.Underlying
- case scanner.String:
- // string_lit
- x = MakeConst(token.STRING, p.lit)
- p.next()
- typ = String.Underlying
- default:
- p.error("expected literal")
- }
- if obj.Type == nil {
- obj.Type = typ
- }
- obj.Data = x
-}
-
-
-// TypeDecl = "type" ExportedName Type .
-//
-func (p *gcParser) parseTypeDecl() {
- p.expectKeyword("type")
- obj := p.parseExportedName(ast.Typ)
-
- // The type object may have been imported before and thus already
- // have a type associated with it. We still need to parse the type
- // structure, but throw it away if the object already has a type.
- // This ensures that all imports refer to the same type object for
- // a given type declaration.
- typ := p.parseType()
-
- if name := obj.Type.(*Name); name.Underlying == nil {
- assert(Underlying(typ) == typ)
- name.Underlying = typ
- }
-}
-
-
-// VarDecl = "var" ExportedName Type .
-//
-func (p *gcParser) parseVarDecl() {
- p.expectKeyword("var")
- obj := p.parseExportedName(ast.Var)
- obj.Type = p.parseType()
-}
-
-
-// FuncDecl = "func" ExportedName Signature .
-//
-func (p *gcParser) parseFuncDecl() {
- // "func" already consumed
- obj := p.parseExportedName(ast.Fun)
- obj.Type = p.parseSignature()
-}
-
-
-// MethodDecl = "func" Receiver identifier Signature .
-// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" .
-//
-func (p *gcParser) parseMethodDecl() {
- // "func" already consumed
- p.expect('(')
- p.parseParameter() // receiver
- p.expect(')')
- p.expect(scanner.Ident)
- p.parseSignature()
-}
-
-
-// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
-//
-func (p *gcParser) parseDecl() {
- switch p.lit {
- case "import":
- p.parseImportDecl()
- case "const":
- p.parseConstDecl()
- case "type":
- p.parseTypeDecl()
- case "var":
- p.parseVarDecl()
- case "func":
- p.next() // look ahead
- if p.tok == '(' {
- p.parseMethodDecl()
- } else {
- p.parseFuncDecl()
- }
- }
- p.expect('\n')
-}
-
-
-// ----------------------------------------------------------------------------
-// Export
-
-// Export = "PackageClause { Decl } "$$" .
-// PackageClause = "package" identifier [ "safe" ] "\n" .
-//
-func (p *gcParser) parseExport() *ast.Object {
- p.expectKeyword("package")
- name := p.expect(scanner.Ident)
- if p.tok != '\n' {
- // A package is safe if it was compiled with the -u flag,
- // which disables the unsafe package.
- // TODO(gri) remember "safe" package
- p.expectKeyword("safe")
- }
- p.expect('\n')
-
- assert(p.imports[p.id] == nil)
- pkg := ast.NewObj(ast.Pkg, name)
- pkg.Data = ast.NewScope(nil)
- p.imports[p.id] = pkg
-
- for p.tok != '$' && p.tok != scanner.EOF {
- p.parseDecl()
- }
-
- if ch := p.scanner.Peek(); p.tok != '$' || ch != '$' {
- // don't call next()/expect() since reading past the
- // export data may cause scanner errors (e.g. NUL chars)
- p.errorf("expected '$$', got %s %c", scanner.TokenString(p.tok), ch)
- }
-
- if n := p.scanner.ErrorCount; n != 0 {
- p.errorf("expected no scanner errors, got %d", n)
- }
-
- return pkg
-}
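For illustration, importing a compiled package through the GcImporter deleted above and listing the names in its scope (r60 API; this assumes the release's compiled packages are installed under $GOROOT/pkg, and the imports map is shared across calls just as in gcimporter_test.go below).

package main

import (
	"fmt"
	"go/ast"
	"go/types"
)

func main() {
	imports := make(map[string]*ast.Object)
	pkg, err := types.GcImporter(imports, "fmt")
	if err != nil {
		panic(err)
	}
	// The imported package's scope is stored in the object's Data field.
	scope := pkg.Data.(*ast.Scope)
	for name := range scope.Objects {
		fmt.Println(name)
	}
}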
diff --git a/src/pkg/go/types/gcimporter_test.go b/src/pkg/go/types/gcimporter_test.go
deleted file mode 100644
index 10240add5..000000000
--- a/src/pkg/go/types/gcimporter_test.go
+++ /dev/null
@@ -1,106 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package types
-
-import (
- "exec"
- "go/ast"
- "io/ioutil"
- "path/filepath"
- "runtime"
- "strings"
- "testing"
- "time"
-)
-
-
-var gcName, gcPath string // compiler name and path
-
-func init() {
- // determine compiler
- switch runtime.GOARCH {
- case "386":
- gcName = "8g"
- case "amd64":
- gcName = "6g"
- case "arm":
- gcName = "5g"
- default:
- gcName = "unknown-GOARCH-compiler"
- gcPath = gcName
- return
- }
- gcPath, _ = exec.LookPath(gcName)
-}
-
-
-func compile(t *testing.T, dirname, filename string) {
- cmd := exec.Command(gcPath, filename)
- cmd.Dir = dirname
- out, err := cmd.CombinedOutput()
- if err != nil {
- t.Errorf("%s %s failed: %s", gcName, filename, err)
- return
- }
- t.Logf("%s", string(out))
-}
-
-
-// Use the same global imports map for all tests. The effect is
-// as if all tested packages were imported into a single package.
-var imports = make(map[string]*ast.Object)
-
-func testPath(t *testing.T, path string) bool {
- _, err := GcImporter(imports, path)
- if err != nil {
- t.Errorf("testPath(%s): %s", path, err)
- return false
- }
- return true
-}
-
-
-const maxTime = 3e9 // maximum allotted testing time in ns
-
-func testDir(t *testing.T, dir string, endTime int64) (nimports int) {
- dirname := filepath.Join(pkgRoot, dir)
- list, err := ioutil.ReadDir(dirname)
- if err != nil {
- t.Errorf("testDir(%s): %s", dirname, err)
- }
- for _, f := range list {
- if time.Nanoseconds() >= endTime {
- t.Log("testing time used up")
- return
- }
- switch {
- case f.IsRegular():
- // try extensions
- for _, ext := range pkgExts {
- if strings.HasSuffix(f.Name, ext) {
- name := f.Name[0 : len(f.Name)-len(ext)] // remove extension
- if testPath(t, filepath.Join(dir, name)) {
- nimports++
- }
- }
- }
- case f.IsDirectory():
- nimports += testDir(t, filepath.Join(dir, f.Name), endTime)
- }
- }
- return
-}
-
-
-func TestGcImport(t *testing.T) {
- compile(t, "testdata", "exports.go")
-
- nimports := 0
- if testPath(t, "./testdata/exports") {
- nimports++
- }
- nimports += testDir(t, "", time.Nanoseconds()+maxTime) // installed packages
- t.Logf("tested %d imports", nimports)
-}
diff --git a/src/pkg/go/types/testdata/exports.go b/src/pkg/go/types/testdata/exports.go
deleted file mode 100644
index 035a13fb7..000000000
--- a/src/pkg/go/types/testdata/exports.go
+++ /dev/null
@@ -1,89 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file is used to generate an object file which
-// serves as test file for gcimporter_test.go.
-
-package exports
-
-import (
- "go/ast"
-)
-
-
-const (
- C0 int = 0
- C1 = 3.14159265
- C2 = 2.718281828i
- C3 = -123.456e-789
- C4 = +123.456E+789
- C5 = 1234i
- C6 = "foo\n"
- C7 = `bar\n`
-)
-
-
-type (
- T1 int
- T2 [10]int
- T3 []int
- T4 *int
- T5 chan int
- T6a chan<- int
- T6b chan (<-chan int)
- T6c chan<- (chan int)
- T7 <-chan *ast.File
- T8 struct{}
- T9 struct {
- a int
- b, c float32
- d []string `go:"tag"`
- }
- T10 struct {
- T8
- T9
- _ *T10
- }
- T11 map[int]string
- T12 interface{}
- T13 interface {
- m1()
- m2(int) float32
- }
- T14 interface {
- T12
- T13
- m3(x ...struct{}) []T9
- }
- T15 func()
- T16 func(int)
- T17 func(x int)
- T18 func() float32
- T19 func() (x float32)
- T20 func(...interface{})
- T21 struct{ next *T21 }
- T22 struct{ link *T23 }
- T23 struct{ link *T22 }
- T24 *T24
- T25 *T26
- T26 *T27
- T27 *T25
- T28 func(T28) T28
-)
-
-
-var (
- V0 int
- V1 = -991.0
-)
-
-
-func F1() {}
-func F2(x int) {}
-func F3() int { return 0 }
-func F4() float32 { return 0 }
-func F5(a, b, c int, u, v, w struct{ x, y T1 }, more ...interface{}) (p, q, r chan<- T10)
-
-
-func (p *T1) M1()
diff --git a/src/pkg/go/types/testdata/test0.src b/src/pkg/go/types/testdata/test0.src
deleted file mode 100644
index 84a1abe27..000000000
--- a/src/pkg/go/types/testdata/test0.src
+++ /dev/null
@@ -1,154 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// type declarations
-
-package test0
-
-import "unsafe"
-
-const pi = 3.1415
-
-type (
- N undeclared /* ERROR "undeclared" */
- B bool
- I int32
- A [10]P
- T struct {
- x, y P
- }
- P *T
- R (*R)
- F func(A) I
- Y interface {
- f(A) I
- }
- S [](((P)))
- M map[I]F
- C chan<- I
-)
-
-
-type (
- p1 pi /* ERROR "not a package" */ .foo
- p2 unsafe.Pointer
-)
-
-
-type (
- Pi pi /* ERROR "not a type" */
-
- a /* DISABLED "illegal cycle" */ a
- a /* ERROR "redeclared" */ int
-
- // where the cycle error appears depends on the
- // order in which declarations are processed
- // (which depends on the order in which a map
- // is iterated through)
- b c
- c /* DISABLED "illegal cycle" */ d
- d e
- e b
-
- t *t
-
- U V
- V *W
- W U
-
- P1 *S2
- P2 P1
-
- S0 struct {
- }
- S1 struct {
- a, b, c int
- u, v, a /* ERROR "redeclared" */ float32
- }
- S2 struct {
- U // anonymous field
- // TODO(gri) recognize double-declaration below
- // U /* ERROR "redeclared" */ int
- }
- S3 struct {
- x S2
- }
- S4/* DISABLED "illegal cycle" */ struct {
- S4
- }
- S5 struct {
- S6
- }
- S6 /* DISABLED "illegal cycle" */ struct {
- field S7
- }
- S7 struct {
- S5
- }
-
- L1 []L1
- L2 []int
-
- A1 [10]int
- A2 /* DISABLED "illegal cycle" */ [10]A2
- A3 /* DISABLED "illegal cycle" */ [10]struct {
- x A4
- }
- A4 [10]A3
-
- F1 func()
- F2 func(x, y, z float32)
- F3 func(x, y, x /* ERROR "redeclared" */ float32)
- F4 func() (x, y, x /* ERROR "redeclared" */ float32)
- F5 func(x int) (x /* ERROR "redeclared" */ float32)
- F6 func(x ...int)
-
- I1 interface{}
- I2 interface {
- m1()
- }
- I3 interface {
- m1()
- m1 /* ERROR "redeclared" */ ()
- }
- I4 interface {
- m1(x, y, x /* ERROR "redeclared" */ float32)
- m2() (x, y, x /* ERROR "redeclared" */ float32)
- m3(x int) (x /* ERROR "redeclared" */ float32)
- }
- I5 interface {
- m1(I5)
- }
- I6 interface {
- S0 /* ERROR "non-interface" */
- }
- I7 interface {
- I1
- I1
- }
- I8 /* DISABLED "illegal cycle" */ interface {
- I8
- }
- I9 /* DISABLED "illegal cycle" */ interface {
- I10
- }
- I10 interface {
- I11
- }
- I11 interface {
- I9
- }
-
- C1 chan int
- C2 <-chan int
- C3 chan<- C3
- C4 chan C5
- C5 chan C6
- C6 chan C4
-
- M1 map[Last]string
- M2 map[string]M2
-
- Last int
-)
diff --git a/src/pkg/go/types/types.go b/src/pkg/go/types/types.go
deleted file mode 100644
index 10b0145b8..000000000
--- a/src/pkg/go/types/types.go
+++ /dev/null
@@ -1,273 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// PACKAGE UNDER CONSTRUCTION. ANY AND ALL PARTS MAY CHANGE.
-// Package types declares the types used to represent Go types.
-//
-package types
-
-import (
- "go/ast"
- "sort"
-)
-
-
-// All types implement the Type interface.
-type Type interface {
- isType()
-}
-
-
-// All concrete types embed ImplementsType which
-// ensures that all types implement the Type interface.
-type ImplementsType struct{}
-
-func (t *ImplementsType) isType() {}
-
-
-// A Bad type is a non-nil placeholder type when we don't know a type.
-type Bad struct {
- ImplementsType
- Msg string // for better error reporting/debugging
-}
-
-
-// A Basic represents an (unnamed) basic type.
-type Basic struct {
- ImplementsType
- // TODO(gri) need a field specifying the exact basic type
-}
-
-
-// An Array represents an array type [Len]Elt.
-type Array struct {
- ImplementsType
- Len uint64
- Elt Type
-}
-
-
-// A Slice represents a slice type []Elt.
-type Slice struct {
- ImplementsType
- Elt Type
-}
-
-
-// A Struct represents a struct type struct{...}.
-// Anonymous fields are represented by objects with empty names.
-type Struct struct {
- ImplementsType
- Fields ObjList // struct fields; or nil
- Tags []string // corresponding tags; or nil
- // TODO(gri) This type needs some rethinking:
- // - at the moment anonymous fields are marked with "" object names,
- // and their names have to be reconstructed
- // - there is no scope for fast lookup (but the parser creates one)
-}
-
-
-// A Pointer represents a pointer type *Base.
-type Pointer struct {
- ImplementsType
- Base Type
-}
-
-
-// A Func represents a function type func(...) (...).
-// Unnamed parameters are represented by objects with empty names.
-type Func struct {
- ImplementsType
- Recv *ast.Object // nil if not a method
- Params ObjList // (incoming) parameters from left to right; or nil
- Results ObjList // (outgoing) results from left to right; or nil
- IsVariadic bool // true if the last parameter's type is of the form ...T
-}
-
-
-// An Interface represents an interface type interface{...}.
-type Interface struct {
- ImplementsType
- Methods ObjList // interface methods sorted by name; or nil
-}
-
-
-// A Map represents a map type map[Key]Elt.
-type Map struct {
- ImplementsType
- Key, Elt Type
-}
-
-
-// A Chan represents a channel type chan Elt, <-chan Elt, or chan<- Elt.
-type Chan struct {
- ImplementsType
- Dir ast.ChanDir
- Elt Type
-}
-
-
-// A Name represents a named type as declared in a type declaration.
-type Name struct {
- ImplementsType
- Underlying Type // nil if not fully declared
- Obj *ast.Object // corresponding declared object
- // TODO(gri) need to remember fields and methods.
-}
-
-
-// If typ is a pointer type, Deref returns the pointer's base type;
-// otherwise it returns typ.
-func Deref(typ Type) Type {
- if typ, ok := typ.(*Pointer); ok {
- return typ.Base
- }
- return typ
-}
-
-
-// Underlying returns the underlying type of a type.
-func Underlying(typ Type) Type {
- if typ, ok := typ.(*Name); ok {
- utyp := typ.Underlying
- if _, ok := utyp.(*Basic); !ok {
- return utyp
- }
- // the underlying type of a type name referring
- // to an (untyped) basic type is the basic type
- // name
- }
- return typ
-}
-
-
-// An ObjList represents an ordered (in some fashion) list of objects.
-type ObjList []*ast.Object
-
-// ObjList implements sort.Interface.
-func (list ObjList) Len() int { return len(list) }
-func (list ObjList) Less(i, j int) bool { return list[i].Name < list[j].Name }
-func (list ObjList) Swap(i, j int) { list[i], list[j] = list[j], list[i] }
-
-// Sort sorts an object list by object name.
-func (list ObjList) Sort() { sort.Sort(list) }
-
-
-// identicalTypes returns true if both lists a and b have the
-// same length and corresponding objects have identical types.
-func identicalTypes(a, b ObjList) bool {
- if len(a) == len(b) {
- for i, x := range a {
- y := b[i]
- if !Identical(x.Type.(Type), y.Type.(Type)) {
- return false
- }
- }
- return true
- }
- return false
-}
-
-
-// Identical returns true if two types are identical.
-func Identical(x, y Type) bool {
- if x == y {
- return true
- }
-
- switch x := x.(type) {
- case *Bad:
- // A Bad type is always identical to any other type
- // (to avoid spurious follow-up errors).
- return true
-
- case *Basic:
- if y, ok := y.(*Basic); ok {
- panic("unimplemented")
- _ = y
- }
-
- case *Array:
- // Two array types are identical if they have identical element types
- // and the same array length.
- if y, ok := y.(*Array); ok {
- return x.Len == y.Len && Identical(x.Elt, y.Elt)
- }
-
- case *Slice:
- // Two slice types are identical if they have identical element types.
- if y, ok := y.(*Slice); ok {
- return Identical(x.Elt, y.Elt)
- }
-
- case *Struct:
- // Two struct types are identical if they have the same sequence of fields,
- // and if corresponding fields have the same names, and identical types,
- // and identical tags. Two anonymous fields are considered to have the same
- // name. Lower-case field names from different packages are always different.
- if y, ok := y.(*Struct); ok {
- // TODO(gri) handle structs from different packages
- if identicalTypes(x.Fields, y.Fields) {
- for i, f := range x.Fields {
- g := y.Fields[i]
- if f.Name != g.Name || x.Tags[i] != y.Tags[i] {
- return false
- }
- }
- return true
- }
- }
-
- case *Pointer:
- // Two pointer types are identical if they have identical base types.
- if y, ok := y.(*Pointer); ok {
- return Identical(x.Base, y.Base)
- }
-
- case *Func:
- // Two function types are identical if they have the same number of parameters
- // and result values, corresponding parameter and result types are identical,
- // and either both functions are variadic or neither is. Parameter and result
- // names are not required to match.
- if y, ok := y.(*Func); ok {
- return identicalTypes(x.Params, y.Params) &&
- identicalTypes(x.Results, y.Results) &&
- x.IsVariadic == y.IsVariadic
- }
-
- case *Interface:
- // Two interface types are identical if they have the same set of methods with
- // the same names and identical function types. Lower-case method names from
- // different packages are always different. The order of the methods is irrelevant.
- if y, ok := y.(*Interface); ok {
- return identicalTypes(x.Methods, y.Methods) // methods are sorted
- }
-
- case *Map:
- // Two map types are identical if they have identical key and value types.
- if y, ok := y.(*Map); ok {
- return Identical(x.Key, y.Key) && Identical(x.Elt, y.Elt)
- }
-
- case *Chan:
- // Two channel types are identical if they have identical value types
- // and the same direction.
- if y, ok := y.(*Chan); ok {
- return x.Dir == y.Dir && Identical(x.Elt, y.Elt)
- }
-
- case *Name:
- // Two named types are identical if their type names originate
- // in the same type declaration.
- if y, ok := y.(*Name); ok {
- return x.Obj == y.Obj ||
- // permit bad objects to be equal to avoid
- // follow up errors
- x.Obj != nil && x.Obj.Kind == ast.Bad ||
- y.Obj != nil && y.Obj.Kind == ast.Bad
- }
- }
-
- return false
-}
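The representation deleted above is meant to be consumed with ordinary type switches, together with Deref, Underlying, and Identical. A minimal sketch against the r60 API follows; the describe helper is invented for the example.

package main

import (
	"fmt"
	"go/types"
)

// describe renders a rough textual form of a type; it is only an example.
func describe(t types.Type) string {
	switch t := types.Underlying(types.Deref(t)).(type) {
	case *types.Slice:
		return "[]" + describe(t.Elt)
	case *types.Map:
		return "map[" + describe(t.Key) + "]" + describe(t.Elt)
	case *types.Chan:
		return "chan " + describe(t.Elt)
	case *types.Name:
		return t.Obj.Name
	case *types.Basic:
		return "basic"
	default:
		return fmt.Sprintf("%T", t)
	}
}

func main() {
	elem := &types.Basic{}
	a := &types.Slice{Elt: elem}
	b := &types.Slice{Elt: elem}
	fmt.Println(describe(a))           // []basic
	fmt.Println(types.Identical(a, b)) // true: identical element types
}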
diff --git a/src/pkg/go/types/universe.go b/src/pkg/go/types/universe.go
deleted file mode 100644
index 96005cff5..000000000
--- a/src/pkg/go/types/universe.go
+++ /dev/null
@@ -1,115 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// FILE UNDER CONSTRUCTION. ANY AND ALL PARTS MAY CHANGE.
-// This file implements the universe and unsafe package scopes.
-
-package types
-
-import "go/ast"
-
-
-var (
- scope *ast.Scope // current scope to use for initialization
- Universe *ast.Scope
- Unsafe *ast.Object // package unsafe
-)
-
-
-func define(kind ast.ObjKind, name string) *ast.Object {
- obj := ast.NewObj(kind, name)
- if scope.Insert(obj) != nil {
- panic("types internal error: double declaration")
- }
- return obj
-}
-
-
-func defType(name string) *Name {
- obj := define(ast.Typ, name)
- typ := &Name{Underlying: &Basic{}, Obj: obj}
- obj.Type = typ
- return typ
-}
-
-
-func defConst(name string) {
- obj := define(ast.Con, name)
- _ = obj // TODO(gri) fill in other properties
-}
-
-
-func defFun(name string) {
- obj := define(ast.Fun, name)
- _ = obj // TODO(gri) fill in other properties
-}
-
-
-var (
- Bool,
- Int,
- Float64,
- Complex128,
- String *Name
-)
-
-
-func init() {
- scope = ast.NewScope(nil)
- Universe = scope
-
- Bool = defType("bool")
- defType("byte") // TODO(gri) should be an alias for uint8
- defType("complex64")
- Complex128 = defType("complex128")
- defType("float32")
- Float64 = defType("float64")
- defType("int8")
- defType("int16")
- defType("int32")
- defType("int64")
- String = defType("string")
- defType("uint8")
- defType("uint16")
- defType("uint32")
- defType("uint64")
- Int = defType("int")
- defType("uint")
- defType("uintptr")
-
- defConst("true")
- defConst("false")
- defConst("iota")
- defConst("nil")
-
- defFun("append")
- defFun("cap")
- defFun("close")
- defFun("complex")
- defFun("copy")
- defFun("imag")
- defFun("len")
- defFun("make")
- defFun("new")
- defFun("panic")
- defFun("print")
- defFun("println")
- defFun("real")
- defFun("recover")
-
- scope = ast.NewScope(nil)
- Unsafe = ast.NewObj(ast.Pkg, "unsafe")
- Unsafe.Data = scope
-
- defType("Pointer")
-
- defFun("Alignof")
- defFun("New")
- defFun("NewArray")
- defFun("Offsetof")
- defFun("Reflect")
- defFun("Sizeof")
- defFun("Typeof")
- defFun("Unreflect")
-}
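Finally, a small sketch of looking up predeclared identifiers in the scopes built by the init function above (r60 API).

package main

import (
	"fmt"
	"go/ast"
	"go/types"
)

func main() {
	// Predeclared identifiers such as "int" live in the Universe scope.
	obj := types.Universe.Lookup("int")
	fmt.Println(obj.Name, obj.Kind == ast.Typ) // int true

	// Package unsafe is a separate object whose Data field holds its scope.
	ptr := types.Unsafe.Data.(*ast.Scope).Lookup("Pointer")
	fmt.Println(ptr.Name) // Pointer
}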