author     Robert Griesemer <gri@golang.org>   2009-03-26 10:53:14 -0700
committer  Robert Griesemer <gri@golang.org>   2009-03-26 10:53:14 -0700
commit     ecfb1855e02ed9bc71b904a7a51afdaa3631c073 (patch)
tree       d73a42b68db17bfa6044151ce3e0f62f666c5442 /src/lib/go
parent     95207c8d8eda9ff909cd5ca6bbef053a6ce4bc92 (diff)
download   golang-ecfb1855e02ed9bc71b904a7a51afdaa3631c073.tar.gz
- introduce explicit Token type
- convert some functions into methods
- corresponding changes in pretty

R=r
DELTA=57  (3 added, 0 deleted, 54 changed)
OCL=26764
CL=26777
Diffstat (limited to 'src/lib/go')
-rw-r--r--  src/lib/go/scanner.go       14
-rw-r--r--  src/lib/go/scanner_test.go  16
-rw-r--r--  src/lib/go/token.go         25
3 files changed, 29 insertions(+), 26 deletions(-)
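
The patch replaces plain int token codes with a named token.Token type and turns the package-level helpers into methods on it. A minimal sketch of how a call site adapts, written in the semicolon-terminated Go syntax of the time; the describe helper and the "token" import path are illustrative, not part of the patch:

	package main

	import "token"	// import path assumed for the package defined in src/lib/go/token.go

	// describe is illustrative only: the former package functions
	// token.TokenString(tok) and token.IsLiteral(tok) become the methods
	// tok.String() and tok.IsLiteral() on the new Token type.
	func describe(tok token.Token) string {
		if tok.IsLiteral() {
			return "literal " + tok.String();
		}
		return tok.String();
	}
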
diff --git a/src/lib/go/scanner.go b/src/lib/go/scanner.go
index 0910e59de..51dca3aa5 100644
--- a/src/lib/go/scanner.go
+++ b/src/lib/go/scanner.go
@@ -167,7 +167,7 @@ func isDigit(ch int) bool {
}
-func (S *Scanner) scanIdentifier() int {
+func (S *Scanner) scanIdentifier() token.Token {
pos := S.loc.Pos;
for isLetter(S.ch) || isDigit(S.ch) {
S.next();
@@ -193,7 +193,7 @@ func (S *Scanner) scanMantissa(base int) {
}
-func (S *Scanner) scanNumber(seen_decimal_point bool) int {
+func (S *Scanner) scanNumber(seen_decimal_point bool) token.Token {
tok := token.INT;
if seen_decimal_point {
@@ -335,7 +335,7 @@ func (S *Scanner) scanRawString(loc Location) {
// respectively. Otherwise, the result is tok0 if there was no other
// matching character, or tok2 if the matching character was ch2.
-func (S *Scanner) switch2(tok0, tok1 int) int {
+func (S *Scanner) switch2(tok0, tok1 token.Token) token.Token {
if S.ch == '=' {
S.next();
return tok1;
@@ -344,7 +344,7 @@ func (S *Scanner) switch2(tok0, tok1 int) int {
}
-func (S *Scanner) switch3(tok0, tok1, ch2, tok2 int) int {
+func (S *Scanner) switch3(tok0, tok1 token.Token, ch2 int, tok2 token.Token) token.Token {
if S.ch == '=' {
S.next();
return tok1;
@@ -357,7 +357,7 @@ func (S *Scanner) switch3(tok0, tok1, ch2, tok2 int) int {
}
-func (S *Scanner) switch4(tok0, tok1, ch2, tok2, tok3 int) int {
+func (S *Scanner) switch4(tok0, tok1 token.Token, ch2 int, tok2, tok3 token.Token) token.Token {
if S.ch == '=' {
S.next();
return tok1;
@@ -378,7 +378,7 @@ func (S *Scanner) switch4(tok0, tok1, ch2, tok2, tok3 int) int {
// the token tok, and the literal text lit corresponding to the
// token. The source end is indicated by token.EOF.
//
-func (S *Scanner) Scan() (loc Location, tok int, lit []byte) {
+func (S *Scanner) Scan() (loc Location, tok token.Token, lit []byte) {
scan_again:
// skip white space
for S.ch == ' ' || S.ch == '\t' || S.ch == '\n' || S.ch == '\r' {
@@ -468,7 +468,7 @@ scan_again:
// meaning as for the Init function. Tokenize keeps scanning until f returns
// false (usually when the token value is token.EOF).
//
-func Tokenize(src []byte, err ErrorHandler, scan_comments bool, f func (loc Location, tok int, lit []byte) bool) {
+func Tokenize(src []byte, err ErrorHandler, scan_comments bool, f func (loc Location, tok token.Token, lit []byte) bool) {
var s Scanner;
s.Init(src, err, scan_comments);
for f(s.Scan()) {
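
For callers of Tokenize, only the callback's token parameter changes type. A hedged sketch of a driver under the new signature; src and errHandler stand in for a real source buffer and ErrorHandler and are not defined in this patch:

	// Illustrative caller of the updated Tokenize: the callback now takes a
	// token.Token instead of an int; scanning stops once it returns false.
	scanner.Tokenize(src, errHandler, true,
		func (loc scanner.Location, tok token.Token, lit []byte) bool {
			return tok != token.EOF;	// keep going until end of source
		});
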
diff --git a/src/lib/go/scanner_test.go b/src/lib/go/scanner_test.go
index 247bbe4df..2309fcd09 100644
--- a/src/lib/go/scanner_test.go
+++ b/src/lib/go/scanner_test.go
@@ -20,18 +20,18 @@ const /* class */ (
)
-func tokenclass(tok int) int {
+func tokenclass(tok token.Token) int {
switch {
- case token.IsLiteral(tok): return literal;
- case token.IsOperator(tok): return operator;
- case token.IsKeyword(tok): return keyword;
+ case tok.IsLiteral(): return literal;
+ case tok.IsOperator(): return operator;
+ case tok.IsKeyword(): return keyword;
}
return special;
}
type elt struct {
- tok int;
+ tok token.Token;
lit string;
class int;
}
@@ -188,7 +188,7 @@ func Test(t *testing.T) {
index := 0;
eloc := scanner.Location{0, 1, 1};
scanner.Tokenize(io.StringBytes(src), &TestErrorHandler{t}, true,
- func (loc Location, tok int, litb []byte) bool {
+ func (loc Location, tok token.Token, litb []byte) bool {
e := elt{token.EOF, "", special};
if index < len(tokens) {
e = tokens[index];
@@ -208,9 +208,9 @@ func Test(t *testing.T) {
t.Errorf("bad column for %s: got %d, expected %d", lit, loc.Col, eloc.Col);
}
if tok != e.tok {
- t.Errorf("bad token for %s: got %s, expected %s", lit, token.TokenString(tok), token.TokenString(e.tok));
+ t.Errorf("bad token for %s: got %s, expected %s", lit, tok.String(), e.tok.String());
}
- if token.IsLiteral(e.tok) && lit != e.lit {
+ if e.tok.IsLiteral() && lit != e.lit {
t.Errorf("bad literal for %s: got %s, expected %s", lit, lit, e.lit);
}
if tokenclass(tok) != e.class {
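
The test table itself only changes the type of the tok field; an illustrative pair of entries under the new elt definition (token.IDENT and token.ADD are constants already declared in token.go, the literal strings are made up):

	var examples = []elt{
		elt{token.IDENT, "foobar", literal},	// identifier, literal class
		elt{token.ADD, "+", operator},		// "+", operator class
	};
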
diff --git a/src/lib/go/token.go b/src/lib/go/token.go
index 26ff1cb99..a0439b868 100644
--- a/src/lib/go/token.go
+++ b/src/lib/go/token.go
@@ -10,10 +10,13 @@ package token
import "strconv"
+// Token is the set of lexical tokens of the Go programming language.
+type Token int
+
// The list of tokens.
const (
// Special tokens
- ILLEGAL = iota;
+ ILLEGAL Token = iota;
EOF;
COMMENT;
@@ -124,7 +127,7 @@ const (
// At the moment we have no array literal syntax that lets us describe
// the index for each element - use a map for now to make sure they are
// in sync.
-var tokens = map [int] string {
+var tokens = map [Token] string {
ILLEGAL : "ILLEGAL",
EOF : "EOF",
@@ -224,13 +227,13 @@ var tokens = map [int] string {
}
-// TokenString returns the string corresponding to the token tok.
+// String returns the string corresponding to the token tok.
// For operators, delimiters, and keywords the string is the actual
// token character sequence (e.g., for the token ADD, the string is
// "+"). For all other tokens the string corresponds to the token
// constant name (e.g. for the token IDENT, the string is "IDENT").
//
-func TokenString(tok int) string {
+func (tok Token) String() string {
if str, exists := tokens[tok]; exists {
return str;
}
@@ -254,7 +257,7 @@ const (
// Precedence returns the syntax precedence of the operator
// token tok or LowestPrecedence if tok is not an operator.
//
-func Precedence(tok int) int {
+func (tok Token) Precedence() int {
switch tok {
case COLON:
return 0;
@@ -275,10 +278,10 @@ func Precedence(tok int) int {
}
-var keywords map [string] int;
+var keywords map [string] Token;
func init() {
- keywords = make(map [string] int);
+ keywords = make(map [string] Token);
for i := keyword_beg + 1; i < keyword_end; i++ {
keywords[tokens[i]] = i;
}
@@ -287,7 +290,7 @@ func init() {
// Lookup maps an identifier to its keyword token or IDENT (if not a keyword).
//
-func Lookup(ident []byte) int {
+func Lookup(ident []byte) Token {
// TODO Maps with []byte key are illegal because []byte does not
// support == . Should find a more efficient solution eventually.
if tok, is_keyword := keywords[string(ident)]; is_keyword {
@@ -302,20 +305,20 @@ func Lookup(ident []byte) int {
// IsLiteral returns true for tokens corresponding to identifiers
// and basic type literals; returns false otherwise.
//
-func IsLiteral(tok int) bool {
+func (tok Token) IsLiteral() bool {
return literal_beg < tok && tok < literal_end;
}
// IsOperator returns true for tokens corresponding to operators and
// delimiters; returns false otherwise.
//
-func IsOperator(tok int) bool {
+func (tok Token) IsOperator() bool {
return operator_beg < tok && tok < operator_end;
}
// IsKeyword returns true for tokens corresponding to keywords;
// returns false otherwise.
//
-func IsKeyword(tok int) bool {
+func (tok Token) IsKeyword() bool {
return keyword_beg < tok && tok < keyword_end;
}
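
Taken together, keyword lookup and the converted predicate and Precedence methods compose as below; a hedged sketch, assuming a FUNC keyword constant is declared in the token list of token.go:

	// Illustrative use of the method-based API (not part of the patch).
	tok := token.Lookup([]byte("func"));	// yields the FUNC keyword token (constant assumed)
	if tok.IsKeyword() {
		println("keyword:", tok.String());	// for keywords, String returns the source text, "func"
	}
	println(token.ADD.Precedence());	// precedence of the "+" operator via the new method
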