author     Robert Griesemer <gri@golang.org>  2009-03-26 10:53:14 -0700
committer  Robert Griesemer <gri@golang.org>  2009-03-26 10:53:14 -0700
commit     ecfb1855e02ed9bc71b904a7a51afdaa3631c073 (patch)
tree       d73a42b68db17bfa6044151ce3e0f62f666c5442 /src/lib/go/token.go
parent     95207c8d8eda9ff909cd5ca6bbef053a6ce4bc92 (diff)
- introduce explicit Token type
- convert some functions into methods
- corresponding changes in pretty

R=r
DELTA=57  (3 added, 0 deleted, 54 changed)
OCL=26764
CL=26777
Diffstat (limited to 'src/lib/go/token.go')
-rw-r--r--  src/lib/go/token.go  25
1 files changed, 14 insertions, 11 deletions
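
For orientation, a minimal sketch of how call sites change with this patch. The identifiers (Token, String, IsKeyword, the former TokenString) come from the diff below; the enclosing program is illustrative only and is written against today's go/token package, which keeps the same names, rather than the 2009 tree and its semicolon rules:

	package main

	import (
		"fmt"
		"go/token"
	)

	// describe is a hypothetical helper, not part of the patch.
	func describe(tok token.Token) string {
		// Before this patch the call was token.TokenString(tok) on an int code;
		// after it the same operation is a method on the typed token value.
		if tok.IsKeyword() {
			return "keyword " + tok.String()
		}
		return tok.String()
	}

	func main() {
		fmt.Println(describe(token.FUNC)) // keyword func
		fmt.Println(describe(token.ADD))  // +
	}
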
diff --git a/src/lib/go/token.go b/src/lib/go/token.go
index 26ff1cb99..a0439b868 100644
--- a/src/lib/go/token.go
+++ b/src/lib/go/token.go
@@ -10,10 +10,13 @@ package token
import "strconv"
+// Token is the set of lexical tokens of the Go programming language.
+type Token int
+
// The list of tokens.
const (
// Special tokens
- ILLEGAL = iota;
+ ILLEGAL Token = iota;
EOF;
COMMENT;
@@ -124,7 +127,7 @@ const (
// At the moment we have no array literal syntax that lets us describe
// the index for each element - use a map for now to make sure they are
// in sync.
-var tokens = map [int] string {
+var tokens = map [Token] string {
ILLEGAL : "ILLEGAL",
EOF : "EOF",
@@ -224,13 +227,13 @@ var tokens = map [int] string {
}
-// TokenString returns the string corresponding to the token tok.
+// String returns the string corresponding to the token tok.
// For operators, delimiters, and keywords the string is the actual
// token character sequence (e.g., for the token ADD, the string is
// "+"). For all other tokens the string corresponds to the token
// constant name (e.g. for the token IDENT, the string is "IDENT").
//
-func TokenString(tok int) string {
+func (tok Token) String() string {
if str, exists := tokens[tok]; exists {
return str;
}
@@ -254,7 +257,7 @@ const (
// Precedence returns the syntax precedence of the operator
// token tok or LowestPrecedence if tok is not an operator.
//
-func Precedence(tok int) int {
+func (tok Token) Precedence() int {
switch tok {
case COLON:
return 0;
@@ -275,10 +278,10 @@ func Precedence(tok int) int {
}
-var keywords map [string] int;
+var keywords map [string] Token;
func init() {
- keywords = make(map [string] int);
+ keywords = make(map [string] Token);
for i := keyword_beg + 1; i < keyword_end; i++ {
keywords[tokens[i]] = i;
}
@@ -287,7 +290,7 @@ func init() {
// Lookup maps an identifier to its keyword token or IDENT (if not a keyword).
//
-func Lookup(ident []byte) int {
+func Lookup(ident []byte) Token {
// TODO Maps with []byte key are illegal because []byte does not
// support == . Should find a more efficient solution eventually.
if tok, is_keyword := keywords[string(ident)]; is_keyword {
@@ -302,20 +305,20 @@ func Lookup(ident []byte) int {
// IsLiteral returns true for tokens corresponding to identifiers
// and basic type literals; returns false otherwise.
//
-func IsLiteral(tok int) bool {
+func (tok Token) IsLiteral() bool {
return literal_beg < tok && tok < literal_end;
}
// IsOperator returns true for tokens corresponding to operators and
// delimiters; returns false otherwise.
//
-func IsOperator(tok int) bool {
+func (tok Token) IsOperator() bool {
return operator_beg < tok && tok < operator_end;
}
// IsKeyword returns true for tokens corresponding to keywords;
// returns false otherwise.
//
-func IsKeyword(tok int) bool {
+func (tok Token) IsKeyword() bool {
return keyword_beg < tok && tok < keyword_end;
}
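
After this patch the method set on Token is String, Precedence, IsLiteral, IsOperator, and IsKeyword, alongside the package-level Lookup. A minimal usage sketch, written against today's go/token (a direct descendant of this file); note that Lookup now takes a string rather than the []byte shown above, and the precedence value in the comment reflects today's table:

	package main

	import (
		"fmt"
		"go/token"
	)

	func main() {
		tok := token.Lookup("func")            // token.FUNC
		fmt.Println(tok, tok.IsKeyword())      // func true
		fmt.Println(token.ADD.Precedence())    // 4 in today's precedence table
		fmt.Println(token.IDENT.IsLiteral())   // true
		fmt.Println(token.LPAREN.IsOperator()) // true
	}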