author    Robert Griesemer <gri@golang.org>  2009-03-12 11:04:11 -0700
committer Robert Griesemer <gri@golang.org>  2009-03-12 11:04:11 -0700
commit    46baa20ee379176bea302deac076e7bd3c3d91b2 (patch)
tree      12876cba27862dbeb82fbbd4df253d5ea8f52892 /src/lib/go/scanner_test.go
parent    24c91c2be36ac35ea14088f157a56876ef763692 (diff)
download  golang-46baa20ee379176bea302deac076e7bd3c3d91b2.tar.gz
- remove special handling of '\n' characters (used to be treated as comments
  for pretty printer purposes - now properly ignored as white space since we
  have line/col information)
- changed the sample use in the comment to an actually compiled function to
  make sure the sample actually works
- added extra tests (checking line and column values, and the tokenize function)

R=rsc
DELTA=253  (61 added, 67 deleted, 125 changed)
OCL=26143
CL=26181
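The heart of the new test is easiest to see in isolation: token literals are joined by a whitespace separator that now contains newlines, and the expected line of each token is advanced by counting those newlines. The sketch below reproduces that line/column check with today's go/scanner and go/token packages; the API in the diff (scanner.Tokenize, scanner.Location, io.StringBytes) predates them, so the modern names here are an illustrative substitute, not the code under review.

```go
package main

import (
	"fmt"
	"go/scanner"
	"go/token"
)

func main() {
	// Tokens separated by whitespace that contains newlines, mirroring the
	// "  \t  \n\n\n" separator the updated test places between literals.
	src := []byte("foobar 0xcafebabe :=\n\n`foobar`\n")

	fset := token.NewFileSet()
	file := fset.AddFile("example.go", fset.Base(), len(src))

	var s scanner.Scanner
	s.Init(file, src, nil /* no error handler */, scanner.ScanComments)

	// Print the line and column reported for each token; the new assertions
	// in scanner_test.go compare exactly this information against expected
	// values accumulated by counting newlines in the separating whitespace.
	for {
		pos, tok, lit := s.Scan()
		if tok == token.EOF {
			break
		}
		p := fset.Position(pos)
		// Note: go/scanner may also emit automatically inserted semicolons.
		fmt.Printf("%d:%d\t%-7s %q\n", p.Line, p.Column, tok, lit)
	}
}
```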
Diffstat (limited to 'src/lib/go/scanner_test.go')
-rw-r--r--  src/lib/go/scanner_test.go | 294
 1 file changed, 153 insertions(+), 141 deletions(-)
diff --git a/src/lib/go/scanner_test.go b/src/lib/go/scanner_test.go
index 94c2e51d5..221f01256 100644
--- a/src/lib/go/scanner_test.go
+++ b/src/lib/go/scanner_test.go
@@ -31,7 +31,6 @@ func tokenclass(tok int) int {
type elt struct {
- pos int;
tok int;
lit string;
class int;
@@ -40,130 +39,120 @@ type elt struct {
var tokens = [...]elt{
// Special tokens
- elt{ 0, token.COMMENT, "/* a comment */", special },
- elt{ 0, token.COMMENT, "\n", special },
+ elt{ token.COMMENT, "/* a comment */", special },
+ elt{ token.COMMENT, "// a comment \n", special },
// Identifiers and basic type literals
- elt{ 0, token.IDENT, "foobar", literal },
- elt{ 0, token.IDENT, "a۰۱۸", literal },
- elt{ 0, token.IDENT, "foo६४", literal },
- elt{ 0, token.IDENT, "bar9876", literal },
- elt{ 0, token.INT, "0", literal },
- elt{ 0, token.INT, "01234567", literal },
- elt{ 0, token.INT, "0xcafebabe", literal },
- elt{ 0, token.FLOAT, "0.", literal },
- elt{ 0, token.FLOAT, ".0", literal },
- elt{ 0, token.FLOAT, "3.14159265", literal },
- elt{ 0, token.FLOAT, "1e0", literal },
- elt{ 0, token.FLOAT, "1e+100", literal },
- elt{ 0, token.FLOAT, "1e-100", literal },
- elt{ 0, token.FLOAT, "2.71828e-1000", literal },
- elt{ 0, token.CHAR, "'a'", literal },
- elt{ 0, token.CHAR, "'\\000'", literal },
- elt{ 0, token.CHAR, "'\\xFF'", literal },
- elt{ 0, token.CHAR, "'\\uff16'", literal },
- elt{ 0, token.CHAR, "'\\U0000ff16'", literal },
- elt{ 0, token.STRING, "`foobar`", literal },
+ elt{ token.IDENT, "foobar", literal },
+ elt{ token.IDENT, "a۰۱۸", literal },
+ elt{ token.IDENT, "foo६४", literal },
+ elt{ token.IDENT, "bar9876", literal },
+ elt{ token.INT, "0", literal },
+ elt{ token.INT, "01234567", literal },
+ elt{ token.INT, "0xcafebabe", literal },
+ elt{ token.FLOAT, "0.", literal },
+ elt{ token.FLOAT, ".0", literal },
+ elt{ token.FLOAT, "3.14159265", literal },
+ elt{ token.FLOAT, "1e0", literal },
+ elt{ token.FLOAT, "1e+100", literal },
+ elt{ token.FLOAT, "1e-100", literal },
+ elt{ token.FLOAT, "2.71828e-1000", literal },
+ elt{ token.CHAR, "'a'", literal },
+ elt{ token.CHAR, "'\\000'", literal },
+ elt{ token.CHAR, "'\\xFF'", literal },
+ elt{ token.CHAR, "'\\uff16'", literal },
+ elt{ token.CHAR, "'\\U0000ff16'", literal },
+ elt{ token.STRING, "`foobar`", literal },
// Operators and delimitors
- elt{ 0, token.ADD, "+", operator },
- elt{ 0, token.SUB, "-", operator },
- elt{ 0, token.MUL, "*", operator },
- elt{ 0, token.QUO, "/", operator },
- elt{ 0, token.REM, "%", operator },
-
- elt{ 0, token.AND, "&", operator },
- elt{ 0, token.OR, "|", operator },
- elt{ 0, token.XOR, "^", operator },
- elt{ 0, token.SHL, "<<", operator },
- elt{ 0, token.SHR, ">>", operator },
-
- elt{ 0, token.ADD_ASSIGN, "+=", operator },
- elt{ 0, token.SUB_ASSIGN, "-=", operator },
- elt{ 0, token.MUL_ASSIGN, "*=", operator },
- elt{ 0, token.QUO_ASSIGN, "/=", operator },
- elt{ 0, token.REM_ASSIGN, "%=", operator },
-
- elt{ 0, token.AND_ASSIGN, "&=", operator },
- elt{ 0, token.OR_ASSIGN, "|=", operator },
- elt{ 0, token.XOR_ASSIGN, "^=", operator },
- elt{ 0, token.SHL_ASSIGN, "<<=", operator },
- elt{ 0, token.SHR_ASSIGN, ">>=", operator },
-
- elt{ 0, token.LAND, "&&", operator },
- elt{ 0, token.LOR, "||", operator },
- elt{ 0, token.ARROW, "<-", operator },
- elt{ 0, token.INC, "++", operator },
- elt{ 0, token.DEC, "--", operator },
-
- elt{ 0, token.EQL, "==", operator },
- elt{ 0, token.LSS, "<", operator },
- elt{ 0, token.GTR, ">", operator },
- elt{ 0, token.ASSIGN, "=", operator },
- elt{ 0, token.NOT, "!", operator },
-
- elt{ 0, token.NEQ, "!=", operator },
- elt{ 0, token.LEQ, "<=", operator },
- elt{ 0, token.GEQ, ">=", operator },
- elt{ 0, token.DEFINE, ":=", operator },
- elt{ 0, token.ELLIPSIS, "...", operator },
-
- elt{ 0, token.LPAREN, "(", operator },
- elt{ 0, token.LBRACK, "[", operator },
- elt{ 0, token.LBRACE, "{", operator },
- elt{ 0, token.COMMA, ",", operator },
- elt{ 0, token.PERIOD, ".", operator },
-
- elt{ 0, token.RPAREN, ")", operator },
- elt{ 0, token.RBRACK, "]", operator },
- elt{ 0, token.RBRACE, "}", operator },
- elt{ 0, token.SEMICOLON, ";", operator },
- elt{ 0, token.COLON, ":", operator },
+ elt{ token.ADD, "+", operator },
+ elt{ token.SUB, "-", operator },
+ elt{ token.MUL, "*", operator },
+ elt{ token.QUO, "/", operator },
+ elt{ token.REM, "%", operator },
+
+ elt{ token.AND, "&", operator },
+ elt{ token.OR, "|", operator },
+ elt{ token.XOR, "^", operator },
+ elt{ token.SHL, "<<", operator },
+ elt{ token.SHR, ">>", operator },
+
+ elt{ token.ADD_ASSIGN, "+=", operator },
+ elt{ token.SUB_ASSIGN, "-=", operator },
+ elt{ token.MUL_ASSIGN, "*=", operator },
+ elt{ token.QUO_ASSIGN, "/=", operator },
+ elt{ token.REM_ASSIGN, "%=", operator },
+
+ elt{ token.AND_ASSIGN, "&=", operator },
+ elt{ token.OR_ASSIGN, "|=", operator },
+ elt{ token.XOR_ASSIGN, "^=", operator },
+ elt{ token.SHL_ASSIGN, "<<=", operator },
+ elt{ token.SHR_ASSIGN, ">>=", operator },
+
+ elt{ token.LAND, "&&", operator },
+ elt{ token.LOR, "||", operator },
+ elt{ token.ARROW, "<-", operator },
+ elt{ token.INC, "++", operator },
+ elt{ token.DEC, "--", operator },
+
+ elt{ token.EQL, "==", operator },
+ elt{ token.LSS, "<", operator },
+ elt{ token.GTR, ">", operator },
+ elt{ token.ASSIGN, "=", operator },
+ elt{ token.NOT, "!", operator },
+
+ elt{ token.NEQ, "!=", operator },
+ elt{ token.LEQ, "<=", operator },
+ elt{ token.GEQ, ">=", operator },
+ elt{ token.DEFINE, ":=", operator },
+ elt{ token.ELLIPSIS, "...", operator },
+
+ elt{ token.LPAREN, "(", operator },
+ elt{ token.LBRACK, "[", operator },
+ elt{ token.LBRACE, "{", operator },
+ elt{ token.COMMA, ",", operator },
+ elt{ token.PERIOD, ".", operator },
+
+ elt{ token.RPAREN, ")", operator },
+ elt{ token.RBRACK, "]", operator },
+ elt{ token.RBRACE, "}", operator },
+ elt{ token.SEMICOLON, ";", operator },
+ elt{ token.COLON, ":", operator },
// Keywords
- elt{ 0, token.BREAK, "break", keyword },
- elt{ 0, token.CASE, "case", keyword },
- elt{ 0, token.CHAN, "chan", keyword },
- elt{ 0, token.CONST, "const", keyword },
- elt{ 0, token.CONTINUE, "continue", keyword },
-
- elt{ 0, token.DEFAULT, "default", keyword },
- elt{ 0, token.DEFER, "defer", keyword },
- elt{ 0, token.ELSE, "else", keyword },
- elt{ 0, token.FALLTHROUGH, "fallthrough", keyword },
- elt{ 0, token.FOR, "for", keyword },
-
- elt{ 0, token.FUNC, "func", keyword },
- elt{ 0, token.GO, "go", keyword },
- elt{ 0, token.GOTO, "goto", keyword },
- elt{ 0, token.IF, "if", keyword },
- elt{ 0, token.IMPORT, "import", keyword },
-
- elt{ 0, token.INTERFACE, "interface", keyword },
- elt{ 0, token.MAP, "map", keyword },
- elt{ 0, token.PACKAGE, "package", keyword },
- elt{ 0, token.RANGE, "range", keyword },
- elt{ 0, token.RETURN, "return", keyword },
-
- elt{ 0, token.SELECT, "select", keyword },
- elt{ 0, token.STRUCT, "struct", keyword },
- elt{ 0, token.SWITCH, "switch", keyword },
- elt{ 0, token.TYPE, "type", keyword },
- elt{ 0, token.VAR, "var", keyword },
+ elt{ token.BREAK, "break", keyword },
+ elt{ token.CASE, "case", keyword },
+ elt{ token.CHAN, "chan", keyword },
+ elt{ token.CONST, "const", keyword },
+ elt{ token.CONTINUE, "continue", keyword },
+
+ elt{ token.DEFAULT, "default", keyword },
+ elt{ token.DEFER, "defer", keyword },
+ elt{ token.ELSE, "else", keyword },
+ elt{ token.FALLTHROUGH, "fallthrough", keyword },
+ elt{ token.FOR, "for", keyword },
+
+ elt{ token.FUNC, "func", keyword },
+ elt{ token.GO, "go", keyword },
+ elt{ token.GOTO, "goto", keyword },
+ elt{ token.IF, "if", keyword },
+ elt{ token.IMPORT, "import", keyword },
+
+ elt{ token.INTERFACE, "interface", keyword },
+ elt{ token.MAP, "map", keyword },
+ elt{ token.PACKAGE, "package", keyword },
+ elt{ token.RANGE, "range", keyword },
+ elt{ token.RETURN, "return", keyword },
+
+ elt{ token.SELECT, "select", keyword },
+ elt{ token.STRUCT, "struct", keyword },
+ elt{ token.SWITCH, "switch", keyword },
+ elt{ token.TYPE, "type", keyword },
+ elt{ token.VAR, "var", keyword },
}
-const whitespace = " \t "; // to separate tokens
-
-func init() {
- // set pos fields
- pos := 0;
- for i := 0; i < len(tokens); i++ {
- tokens[i].pos = pos;
- pos += len(tokens[i].lit) + len(whitespace);
- }
-}
-
+const whitespace = " \t \n\n\n"; // to separate tokens
type TestErrorHandler struct {
t *testing.T
@@ -174,38 +163,61 @@ func (h *TestErrorHandler) Error(loc scanner.Location, msg string) {
}
+func NewlineCount(s string) int {
+ n := 0;
+ for i := 0; i < len(s); i++ {
+ if s[i] == '\n' {
+ n++;
+ }
+ }
+ return n;
+}
+
+
func Test(t *testing.T) {
// make source
var src string;
for i, e := range tokens {
src += e.lit + whitespace;
}
-
- // set up scanner
- var s scanner.Scanner;
- s.Init(io.StringBytes(src), &TestErrorHandler{t}, true);
+ whitespace_linecount := NewlineCount(whitespace);
// verify scan
- for i, e := range tokens {
- loc, tok, lit := s.Scan();
- if loc.Pos != e.pos {
- t.Errorf("bad position for %s: got %d, expected %d", e.lit, loc.Pos, e.pos);
- }
- if tok != e.tok {
- t.Errorf("bad token for %s: got %s, expected %s", e.lit, token.TokenString(tok), token.TokenString(e.tok));
- }
- if token.IsLiteral(e.tok) && string(lit) != e.lit {
- t.Errorf("bad literal for %s: got %s, expected %s", e.lit, string(lit), e.lit);
+ index := 0;
+ eloc := scanner.Location{0, 1, 1};
+ scanner.Tokenize(io.StringBytes(src), &TestErrorHandler{t}, true,
+ func (loc Location, tok int, litb []byte) bool {
+ e := elt{token.EOF, "", special};
+ if index < len(tokens) {
+ e = tokens[index];
+ }
+ lit := string(litb);
+ if tok == token.EOF {
+ lit = "<EOF>";
+ eloc.Col = 0;
+ }
+ if loc.Pos != eloc.Pos {
+ t.Errorf("bad position for %s: got %d, expected %d", lit, loc.Pos, eloc.Pos);
+ }
+ if loc.Line != eloc.Line {
+ t.Errorf("bad line for %s: got %d, expected %d", lit, loc.Line, eloc.Line);
+ }
+ if loc.Col != eloc.Col {
+ t.Errorf("bad column for %s: got %d, expected %d", lit, loc.Col, eloc.Col);
+ }
+ if tok != e.tok {
+ t.Errorf("bad token for %s: got %s, expected %s", lit, token.TokenString(tok), token.TokenString(e.tok));
+ }
+ if token.IsLiteral(e.tok) && lit != e.lit {
+ t.Errorf("bad literal for %s: got %s, expected %s", lit, lit, e.lit);
+ }
+ if tokenclass(tok) != e.class {
+ t.Errorf("bad class for %s: got %d, expected %d", lit, tokenclass(tok), e.class);
+ }
+ eloc.Pos += len(lit) + len(whitespace);
+ eloc.Line += NewlineCount(lit) + whitespace_linecount;
+ index++;
+ return tok != token.EOF;
}
- if tokenclass(tok) != e.class {
- t.Errorf("bad class for %s: got %d, expected %d", e.lit, tokenclass(tok), e.class);
- }
- }
- loc, tok, lit := s.Scan();
- if tok != token.EOF {
- t.Errorf("bad token at eof: got %s, expected EOF", token.TokenString(tok));
- }
- if tokenclass(tok) != special {
- t.Errorf("bad class at eof: got %d, expected %d", tokenclass(tok), special);
- }
+ );
}
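For readers unfamiliar with the callback style used above: scanner.Tokenize drives a user-supplied function once per token and stops when it returns false. The current go/scanner has no such entry point, but the shape can be approximated as below; tokenize here is a hypothetical helper written for illustration, not part of any released package.

```go
package main

import (
	"fmt"
	"go/scanner"
	"go/token"
)

// tokenize is a hypothetical stand-in for the 2009 scanner.Tokenize entry
// point exercised by the test: it scans src and calls f for every token,
// stopping when f returns false or EOF has been delivered.
func tokenize(src []byte, f func(pos token.Position, tok token.Token, lit string) bool) {
	fset := token.NewFileSet()
	file := fset.AddFile("src.go", fset.Base(), len(src))

	var s scanner.Scanner
	s.Init(file, src, nil, scanner.ScanComments)

	for {
		pos, tok, lit := s.Scan()
		if !f(fset.Position(pos), tok, lit) || tok == token.EOF {
			return
		}
	}
}

func main() {
	src := []byte("x := 42 // a comment\n")
	tokenize(src, func(pos token.Position, tok token.Token, lit string) bool {
		fmt.Printf("%d:%d\t%-8s %q\n", pos.Line, pos.Column, tok, lit)
		// Returning false stops the scan early, just as the test's callback
		// returns tok != token.EOF.
		return tok != token.EOF
	})
}
```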