Diffstat (limited to 'src/pkg')
-rw-r--r--  src/pkg/archive/tar/reader.go | 6
-rw-r--r--  src/pkg/archive/tar/writer.go | 2
-rw-r--r--  src/pkg/base64/base64.go | 6
-rw-r--r--  src/pkg/base64/base64_test.go | 4
-rw-r--r--  src/pkg/bignum/arith.go | 4
-rwxr-xr-x  src/pkg/bignum/bignum.go | 4
-rw-r--r--  src/pkg/bignum/nrdiv_test.go | 4
-rw-r--r--  src/pkg/bufio/bufio.go | 158
-rw-r--r--  src/pkg/bytes/buffer.go | 40
-rw-r--r--  src/pkg/bytes/buffer_test.go | 2
-rw-r--r--  src/pkg/bytes/bytes_test.go | 154
-rw-r--r--  src/pkg/compress/flate/deflate.go | 139
-rw-r--r--  src/pkg/compress/flate/deflate_test.go | 337
-rw-r--r--  src/pkg/compress/flate/flate_test.go | 12
-rw-r--r--  src/pkg/compress/flate/huffman_bit_writer.go | 112
-rw-r--r--  src/pkg/compress/flate/inflate.go | 22
-rw-r--r--  src/pkg/compress/flate/reverse_bits.go | 2
-rw-r--r--  src/pkg/compress/flate/token.go | 16
-rw-r--r--  src/pkg/compress/gzip/gunzip.go | 2
-rw-r--r--  src/pkg/compress/gzip/gunzip_test.go | 59
-rw-r--r--  src/pkg/compress/zlib/reader.go | 4
-rw-r--r--  src/pkg/container/vector/vector.go | 8
-rw-r--r--  src/pkg/crypto/aes/aes_test.go | 4
-rw-r--r--  src/pkg/crypto/aes/block.go | 58
-rw-r--r--  src/pkg/crypto/block/ctr.go | 2
-rw-r--r--  src/pkg/crypto/block/ctr_aes_test.go | 4
-rw-r--r--  src/pkg/crypto/block/ecb.go | 8
-rw-r--r--  src/pkg/crypto/block/ofb_aes_test.go | 4
-rw-r--r--  src/pkg/datafmt/parser.go | 4
-rw-r--r--  src/pkg/debug/binary/binary.go | 4
-rw-r--r--  src/pkg/debug/dwarf/buf.go | 6
-rw-r--r--  src/pkg/debug/dwarf/entry.go | 10
-rw-r--r--  src/pkg/debug/elf/elf_test.go | 2
-rw-r--r--  src/pkg/debug/elf/file.go | 2
-rw-r--r--  src/pkg/debug/elf/file_test.go | 32
-rw-r--r--  src/pkg/debug/gosym/pclntab.go | 2
-rw-r--r--  src/pkg/debug/proc/proc_linux.go | 4
-rw-r--r--  src/pkg/ebnf/parser.go | 14
-rw-r--r--  src/pkg/flag/flag.go | 4
-rw-r--r--  src/pkg/fmt/print.go | 2
-rw-r--r--  src/pkg/go/ast/filter.go | 4
-rw-r--r--  src/pkg/go/doc/doc.go | 158
-rw-r--r--  src/pkg/gob/encode.go | 6
-rw-r--r--  src/pkg/http/client.go | 2
-rw-r--r--  src/pkg/http/request.go | 178
-rw-r--r--  src/pkg/http/server.go | 2
-rw-r--r--  src/pkg/image/png/reader.go | 2
-rw-r--r--  src/pkg/io/pipe.go | 2
-rw-r--r--  src/pkg/log/log.go | 8
-rw-r--r--  src/pkg/log/log_test.go | 6
-rw-r--r--  src/pkg/math/pow10.go | 4
-rw-r--r--  src/pkg/math/sinh.go | 4
-rw-r--r--  src/pkg/math/sqrt.go | 8
-rw-r--r--  src/pkg/os/exec.go | 2
-rw-r--r--  src/pkg/os/path_test.go | 8
-rw-r--r--  src/pkg/os/stat_nacl_386.go | 6
-rw-r--r--  src/pkg/os/time.go | 2
-rw-r--r--  src/pkg/path/path.go | 11
-rw-r--r--  src/pkg/path/path_test.go | 19
-rw-r--r--  src/pkg/regexp/all_test.go | 6
-rw-r--r--  src/pkg/strconv/atof.go | 8
-rw-r--r--  src/pkg/strconv/decimal_test.go | 2
-rw-r--r--  src/pkg/strconv/fp_test.go | 4
-rw-r--r--  src/pkg/strconv/ftoa.go | 54
-rw-r--r--  src/pkg/template/template_test.go | 134
-rw-r--r--  src/pkg/testing/iotest/reader.go | 2
-rw-r--r--  src/pkg/testing/regexp.go | 288
-rw-r--r--  src/pkg/testing/regexp_test.go | 127
-rw-r--r--  src/pkg/testing/testing.go | 4
-rw-r--r--  src/pkg/time/time.go | 4
-rw-r--r--  src/pkg/time/time_test.go | 39
-rw-r--r--  src/pkg/time/zoneinfo.go | 2
-rw-r--r--  src/pkg/unicode/script_test.go | 201
-rw-r--r--  src/pkg/utf8/utf8.go | 112
-rw-r--r--  src/pkg/utf8/utf8_test.go | 2
-rw-r--r--  src/pkg/xml/xml.go | 75
76 files changed, 1415 insertions, 1344 deletions
diff --git a/src/pkg/archive/tar/reader.go b/src/pkg/archive/tar/reader.go
index 81d79841d..083301b5f 100644
--- a/src/pkg/archive/tar/reader.go
+++ b/src/pkg/archive/tar/reader.go
@@ -121,11 +121,11 @@ func (tr *Reader) readHeader() *Header {
}
// Two blocks of zero bytes marks the end of the archive.
- if bytes.Equal(header, zeroBlock[0:blockSize]) {
+ if bytes.Equal(header, zeroBlock[0 : blockSize]) {
if _, tr.err = io.ReadFull(tr.r, header); tr.err != nil {
return nil;
}
- if !bytes.Equal(header, zeroBlock[0:blockSize]) {
+ if !bytes.Equal(header, zeroBlock[0 : blockSize]) {
tr.err = HeaderError;
}
return nil;
@@ -198,7 +198,7 @@ func (tr *Reader) readHeader() *Header {
// Maximum value of hdr.Size is 64 GB (12 octal digits),
// so there's no risk of int64 overflowing.
tr.nb = int64(hdr.Size);
- tr.pad = -tr.nb & (blockSize-1); // blockSize is a power of two
+ tr.pad = -tr.nb & (blockSize - 1); // blockSize is a power of two
return hdr;
}
diff --git a/src/pkg/archive/tar/writer.go b/src/pkg/archive/tar/writer.go
index f253a01ce..5e0626493 100644
--- a/src/pkg/archive/tar/writer.go
+++ b/src/pkg/archive/tar/writer.go
@@ -120,7 +120,7 @@ func (tw *Writer) WriteHeader(hdr *Header) os.Error {
}
tw.nb = int64(hdr.Size);
- tw.pad = -tw.nb & (blockSize-1); // blockSize is a power of two
+ tw.pad = -tw.nb & (blockSize - 1); // blockSize is a power of two
header := make([]byte, blockSize);
s := slicer(header);
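Both tar hunks above compute the trailing padding as -n & (blockSize-1), which works only because blockSize is a power of two. A minimal standalone sketch of that rounding trick, written in modern Go and assuming the tar format's 512-byte block (not part of the change itself):

	package main

	import "fmt"

	const blockSize = 512 // tar block size, a power of two

	// pad returns how many bytes are needed to reach the next block boundary.
	// For a power-of-two blockSize, -n & (blockSize-1) equals
	// (blockSize - n%blockSize) % blockSize.
	func pad(n int64) int64 { return -n & (blockSize - 1) }

	func main() {
		for _, n := range []int64{0, 1, 511, 512, 513} {
			fmt.Printf("size %4d -> pad %3d\n", n, pad(n))
		}
	}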
diff --git a/src/pkg/base64/base64.go b/src/pkg/base64/base64.go
index c3c2c911e..111dc1da6 100644
--- a/src/pkg/base64/base64.go
+++ b/src/pkg/base64/base64.go
@@ -143,7 +143,7 @@ func (e *encoder) Write(p []byte) (n int, err os.Error) {
// Large interior chunks.
for len(p) > 3 {
- nn := len(e.out) / 4 * 3;
+ nn := len(e.out)/4*3;
if nn > len(p) {
nn = len(p);
}
@@ -286,7 +286,7 @@ func (d *decoder) Read(p []byte) (n int, err os.Error) {
// Use leftover decoded output from last read.
if len(d.out) > 0 {
n = bytes.Copy(p, d.out);
- d.out = d.out[n : len(d.out)];
+ d.out = d.out[n:len(d.out)];
return n, nil;
}
@@ -311,7 +311,7 @@ func (d *decoder) Read(p []byte) (n int, err os.Error) {
nw, d.end, d.err = d.enc.decode(d.buf[0:nr], &d.outbuf);
d.out = d.outbuf[0:nw];
n = bytes.Copy(p, d.out);
- d.out = d.out[n : len(d.out)];
+ d.out = d.out[n:len(d.out)];
} else {
n, d.end, d.err = d.enc.decode(d.buf[0:nr], p);
}
diff --git a/src/pkg/base64/base64_test.go b/src/pkg/base64/base64_test.go
index fcf58c324..54d2326f5 100644
--- a/src/pkg/base64/base64_test.go
+++ b/src/pkg/base64/base64_test.go
@@ -104,7 +104,7 @@ func TestDecode(t *testing.T) {
testEqual(t, "Decode(%q) = error %v, want %v", p.encoded, err, os.Error(nil));
testEqual(t, "Decode(%q) = length %v, want %v", p.encoded, count, len(p.decoded));
if len(p.encoded) > 0 {
- testEqual(t, "Decode(%q) = end %v, want %v", p.encoded, end, (p.encoded[len(p.encoded) - 1] == '='));
+ testEqual(t, "Decode(%q) = end %v, want %v", p.encoded, end, (p.encoded[len(p.encoded)-1] == '='));
}
testEqual(t, "Decode(%q) = %q, want %q", p.encoded, string(dbuf[0:count]), p.decoded);
}
@@ -130,7 +130,7 @@ func TestDecoder(t *testing.T) {
func TestDecoderBuffering(t *testing.T) {
for bs := 1; bs <= 12; bs++ {
decoder := NewDecoder(StdEncoding, bytes.NewBufferString(bigtest.encoded));
- buf := make([]byte, len(bigtest.decoded) + 12);
+ buf := make([]byte, len(bigtest.decoded)+12);
var total int;
for total = 0; total < len(bigtest.decoded); {
n, err := decoder.Read(buf[total : total+bs]);
diff --git a/src/pkg/bignum/arith.go b/src/pkg/bignum/arith.go
index f60b66828..a84a1139e 100644
--- a/src/pkg/bignum/arith.go
+++ b/src/pkg/bignum/arith.go
@@ -18,7 +18,7 @@ func Mul128(x, y uint64) (z1, z0 uint64) {
// and return the product as 2 words.
const (
- W = uint(unsafe.Sizeof(x)) * 8;
+ W = uint(unsafe.Sizeof(x))*8;
W2 = W/2;
B2 = 1<<W2;
M2 = B2-1;
@@ -80,7 +80,7 @@ func MulAdd128(x, y, c uint64) (z1, z0 uint64) {
// and return the product as 2 words.
const (
- W = uint(unsafe.Sizeof(x)) * 8;
+ W = uint(unsafe.Sizeof(x))*8;
W2 = W/2;
B2 = 1<<W2;
M2 = B2-1;
diff --git a/src/pkg/bignum/bignum.go b/src/pkg/bignum/bignum.go
index 55bdad0ab..961d19c42 100755
--- a/src/pkg/bignum/bignum.go
+++ b/src/pkg/bignum/bignum.go
@@ -899,9 +899,9 @@ func hexvalue(ch byte) uint {
case '0' <= ch && ch <= '9':
d = uint(ch-'0');
case 'a' <= ch && ch <= 'f':
- d = uint(ch-'a') + 10;
+ d = uint(ch-'a')+10;
case 'A' <= ch && ch <= 'F':
- d = uint(ch-'A') + 10;
+ d = uint(ch-'A')+10;
}
return d;
}
diff --git a/src/pkg/bignum/nrdiv_test.go b/src/pkg/bignum/nrdiv_test.go
index 6559d62bc..af21df2e6 100644
--- a/src/pkg/bignum/nrdiv_test.go
+++ b/src/pkg/bignum/nrdiv_test.go
@@ -99,7 +99,7 @@ func nrDivEst(x0, y0 Natural) Natural {
// Determine a scale factor f = 2^e such that
// 0.5 <= y/f == y*(2^-e) < 1.0
// and scale y accordingly.
- e := int(y.m.Log2()) + 1;
+ e := int(y.m.Log2())+1;
y.e -= e;
// t1
@@ -133,7 +133,7 @@ func nrDivEst(x0, y0 Natural) Natural {
// reduce mantissa size
// TODO: Find smaller bound as it will reduce
// computation time massively.
- d := int(r.m.Log2() + 1) - maxLen;
+ d := int(r.m.Log2() + 1)-maxLen;
if d > 0 {
r = fpNat{r.m.Shr(uint(d)), r.e + d};
}
diff --git a/src/pkg/bufio/bufio.go b/src/pkg/bufio/bufio.go
index 895dbf6e7..5b71182c4 100644
--- a/src/pkg/bufio/bufio.go
+++ b/src/pkg/bufio/bufio.go
@@ -22,7 +22,7 @@ import (
// - buffered output
const (
- defaultBufSize = 4096
+ defaultBufSize = 4096;
)
// Errors introduced by this package.
@@ -31,20 +31,21 @@ type Error struct {
}
var (
- ErrInvalidUnreadByte os.Error = &Error{"bufio: invalid use of UnreadByte"};
- ErrBufferFull os.Error = &Error{"bufio: buffer full"};
- errInternal os.Error = &Error{"bufio: internal error"};
+ ErrInvalidUnreadByte os.Error = &Error{"bufio: invalid use of UnreadByte"};
+ ErrBufferFull os.Error = &Error{"bufio: buffer full"};
+ errInternal os.Error = &Error{"bufio: internal error"};
)
// BufSizeError is the error representing an invalid buffer size.
type BufSizeError int
+
func (b BufSizeError) String() string {
return "bufio: bad buffer size " + strconv.Itoa(int(b));
}
func copySlice(dst []byte, src []byte) {
for i := 0; i < len(dst); i++ {
- dst[i] = src[i]
+ dst[i] = src[i];
}
}
@@ -53,11 +54,11 @@ func copySlice(dst []byte, src []byte) {
// Reader implements buffering for an io.Reader object.
type Reader struct {
- buf []byte;
- rd io.Reader;
- r, w int;
- err os.Error;
- lastbyte int;
+ buf []byte;
+ rd io.Reader;
+ r, w int;
+ err os.Error;
+ lastbyte int;
}
// NewReaderSize creates a new Reader whose buffer has the specified size,
@@ -66,18 +67,18 @@ type Reader struct {
// It returns the Reader and any error.
func NewReaderSize(rd io.Reader, size int) (*Reader, os.Error) {
if size <= 0 {
- return nil, BufSizeError(size)
+ return nil, BufSizeError(size);
}
// Is it already a Reader?
b, ok := rd.(*Reader);
if ok && len(b.buf) >= size {
- return b, nil
+ return b, nil;
}
b = new(Reader);
b.buf = make([]byte, size);
b.rd = rd;
b.lastbyte = -1;
- return b, nil
+ return b, nil;
}
// NewReader returns a new Reader whose buffer has the default size.
@@ -94,15 +95,15 @@ func NewReader(rd io.Reader) *Reader {
func (b *Reader) fill() {
// Slide existing data to beginning.
if b.w > b.r {
- copySlice(b.buf[0:b.w-b.r], b.buf[b.r:b.w]);
+ copySlice(b.buf[0 : b.w - b.r], b.buf[b.r : b.w]);
b.w -= b.r;
} else {
- b.w = 0
+ b.w = 0;
}
b.r = 0;
// Read new data.
- n, e := b.rd.Read(b.buf[b.w:len(b.buf)]);
+ n, e := b.rd.Read(b.buf[b.w : len(b.buf)]);
b.w += n;
if e != nil {
b.err = e;
@@ -120,7 +121,7 @@ func (b *Reader) Read(p []byte) (nn int, err os.Error) {
n := len(p);
if b.w == b.r {
if b.err != nil {
- return nn, b.err
+ return nn, b.err;
}
if len(p) >= len(b.buf) {
// Large read, empty buffer.
@@ -137,15 +138,15 @@ func (b *Reader) Read(p []byte) (nn int, err os.Error) {
continue;
}
if n > b.w - b.r {
- n = b.w - b.r
+ n = b.w - b.r;
}
- copySlice(p[0:n], b.buf[b.r:b.r+n]);
+ copySlice(p[0:n], b.buf[b.r : b.r + n]);
p = p[n:len(p)];
b.r += n;
- b.lastbyte = int(b.buf[b.r-1]);
- nn += n
+ b.lastbyte = int(b.buf[b.r - 1]);
+ nn += n;
}
- return nn, nil
+ return nn, nil;
}
// ReadByte reads and returns a single byte.
@@ -153,14 +154,14 @@ func (b *Reader) Read(p []byte) (nn int, err os.Error) {
func (b *Reader) ReadByte() (c byte, err os.Error) {
for b.w == b.r {
if b.err != nil {
- return 0, b.err
+ return 0, b.err;
}
b.fill();
}
c = b.buf[b.r];
b.r++;
b.lastbyte = int(c);
- return c, nil
+ return c, nil;
}
// UnreadByte unreads the last byte. Only the most recently read byte can be unread.
@@ -173,17 +174,17 @@ func (b *Reader) UnreadByte() os.Error {
return nil;
}
if b.r <= 0 {
- return ErrInvalidUnreadByte
+ return ErrInvalidUnreadByte;
}
b.r--;
b.lastbyte = -1;
- return nil
+ return nil;
}
// ReadRune reads a single UTF-8 encoded Unicode character and returns the
// rune and its size in bytes.
func (b *Reader) ReadRune() (rune int, size int, err os.Error) {
- for b.r + utf8.UTFMax > b.w && !utf8.FullRune(b.buf[b.r:b.w]) && b.err == nil {
+ for b.r + utf8.UTFMax > b.w && !utf8.FullRune(b.buf[b.r : b.w]) && b.err == nil {
b.fill();
}
if b.r == b.w {
@@ -191,11 +192,11 @@ func (b *Reader) ReadRune() (rune int, size int, err os.Error) {
}
rune, size = int(b.buf[b.r]), 1;
if rune >= 0x80 {
- rune, size = utf8.DecodeRune(b.buf[b.r:b.w]);
+ rune, size = utf8.DecodeRune(b.buf[b.r : b.w]);
}
b.r += size;
- b.lastbyte = int(b.buf[b.r-1]);
- return rune, size, nil
+ b.lastbyte = int(b.buf[b.r - 1]);
+ return rune, size, nil;
}
// Helper function: look for byte c in array p,
@@ -203,10 +204,10 @@ func (b *Reader) ReadRune() (rune int, size int, err os.Error) {
func findByte(p []byte, c byte) int {
for i := 0; i < len(p); i++ {
if p[i] == c {
- return i
+ return i;
}
}
- return -1
+ return -1;
}
// Buffered returns the number of bytes that can be read from the current buffer.
@@ -226,33 +227,33 @@ func (b *Reader) Buffered() int {
// ReadSlice returns err != nil if and only if line does not end in delim.
func (b *Reader) ReadSlice(delim byte) (line []byte, err os.Error) {
// Look in buffer.
- if i := findByte(b.buf[b.r:b.w], delim); i >= 0 {
- line1 := b.buf[b.r:b.r+i+1];
+ if i := findByte(b.buf[b.r : b.w], delim); i >= 0 {
+ line1 := b.buf[b.r : b.r + i + 1];
b.r += i+1;
- return line1, nil
+ return line1, nil;
}
// Read more into buffer, until buffer fills or we find delim.
for {
if b.err != nil {
- line := b.buf[b.r:b.w];
+ line := b.buf[b.r : b.w];
b.r = b.w;
- return line, b.err
+ return line, b.err;
}
n := b.Buffered();
b.fill();
// Search new part of buffer
- if i := findByte(b.buf[n:b.w], delim); i >= 0 {
- line := b.buf[0:n+i+1];
+ if i := findByte(b.buf[n : b.w], delim); i >= 0 {
+ line := b.buf[0 : n+i+1];
b.r = n+i+1;
- return line, nil
+ return line, nil;
}
// Buffer is full?
if b.Buffered() >= len(b.buf) {
- return nil, ErrBufferFull
+ return nil, ErrBufferFull;
}
}
panic("not reached");
@@ -275,11 +276,11 @@ func (b *Reader) ReadBytes(delim byte) (line []byte, err os.Error) {
var e os.Error;
frag, e = b.ReadSlice(delim);
if e == nil { // got final fragment
- break
+ break;
}
if e != ErrBufferFull { // unexpected error
err = e;
- break
+ break;
}
// Read bytes out of buffer.
@@ -289,12 +290,12 @@ func (b *Reader) ReadBytes(delim byte) (line []byte, err os.Error) {
if e != nil {
frag = buf[0:n];
err = e;
- break
+ break;
}
if n != len(buf) {
frag = buf[0:n];
err = errInternal;
- break
+ break;
}
// Grow list if needed.
@@ -305,7 +306,7 @@ func (b *Reader) ReadBytes(delim byte) (line []byte, err os.Error) {
for i := 0; i < len(full); i++ {
newfull[i] = full[i];
}
- full = newfull
+ full = newfull;
}
// Save buffer
@@ -316,7 +317,7 @@ func (b *Reader) ReadBytes(delim byte) (line []byte, err os.Error) {
// Allocate new buffer to hold the full pieces and the fragment.
n := 0;
for i := 0; i < nfull; i++ {
- n += len(full[i])
+ n += len(full[i]);
}
n += len(frag);
@@ -324,11 +325,11 @@ func (b *Reader) ReadBytes(delim byte) (line []byte, err os.Error) {
buf := make([]byte, n);
n = 0;
for i := 0; i < nfull; i++ {
- copySlice(buf[n:n+len(full[i])], full[i]);
- n += len(full[i])
+ copySlice(buf[n : n+len(full[i])], full[i]);
+ n += len(full[i]);
}
- copySlice(buf[n:n+len(frag)], frag);
- return buf, err
+ copySlice(buf[n : n+len(frag)], frag);
+ return buf, err;
}
// ReadString reads until the first occurrence of delim in the input,
@@ -346,10 +347,10 @@ func (b *Reader) ReadString(delim byte) (line string, err os.Error) {
// Writer implements buffering for an io.Writer object.
type Writer struct {
- err os.Error;
- buf []byte;
- n int;
- wr io.Writer;
+ err os.Error;
+ buf []byte;
+ n int;
+ wr io.Writer;
}
// NewWriterSize creates a new Writer whose buffer has the specified size,
@@ -358,17 +359,17 @@ type Writer struct {
// It returns the Writer and any error.
func NewWriterSize(wr io.Writer, size int) (*Writer, os.Error) {
if size <= 0 {
- return nil, BufSizeError(size)
+ return nil, BufSizeError(size);
}
// Is it already a Writer?
b, ok := wr.(*Writer);
if ok && len(b.buf) >= size {
- return b, nil
+ return b, nil;
}
b = new(Writer);
b.buf = make([]byte, size);
b.wr = wr;
- return b, nil
+ return b, nil;
}
// NewWriter returns a new Writer whose buffer has the default size.
@@ -384,32 +385,32 @@ func NewWriter(wr io.Writer) *Writer {
// Flush writes any buffered data to the underlying io.Writer.
func (b *Writer) Flush() os.Error {
if b.err != nil {
- return b.err
+ return b.err;
}
- n, e := b.wr.Write(b.buf[0:b.n]);
+ n, e := b.wr.Write(b.buf[0 : b.n]);
if n < b.n && e == nil {
e = io.ErrShortWrite;
}
if e != nil {
if n > 0 && n < b.n {
- copySlice(b.buf[0:b.n-n], b.buf[n:b.n])
+ copySlice(b.buf[0 : b.n - n], b.buf[n : b.n]);
}
b.n -= n;
b.err = e;
- return e
+ return e;
}
b.n = 0;
- return nil
+ return nil;
}
// Available returns how many bytes are unused in the buffer.
func (b *Writer) Available() int {
- return len(b.buf) - b.n
+ return len(b.buf) - b.n;
}
// Buffered returns the number of bytes that have been written into the current buffer.
func (b *Writer) Buffered() int {
- return b.n
+ return b.n;
}
// Write writes the contents of p into the buffer.
@@ -418,16 +419,16 @@ func (b *Writer) Buffered() int {
// why the write is short.
func (b *Writer) Write(p []byte) (nn int, err os.Error) {
if b.err != nil {
- return 0, b.err
+ return 0, b.err;
}
nn = 0;
for len(p) > 0 {
n := b.Available();
if n <= 0 {
if b.Flush(); b.err != nil {
- break
+ break;
}
- n = b.Available()
+ n = b.Available();
}
if b.Available() == 0 && len(p) >= len(b.buf) {
// Large write, empty buffer.
@@ -441,33 +442,33 @@ func (b *Writer) Write(p []byte) (nn int, err os.Error) {
continue;
}
if n > len(p) {
- n = len(p)
+ n = len(p);
}
- copySlice(b.buf[b.n:b.n+n], p[0:n]);
+ copySlice(b.buf[b.n : b.n + n], p[0:n]);
b.n += n;
nn += n;
- p = p[n:len(p)]
+ p = p[n:len(p)];
}
- return nn, b.err
+ return nn, b.err;
}
// WriteByte writes a single byte.
func (b *Writer) WriteByte(c byte) os.Error {
if b.err != nil {
- return b.err
+ return b.err;
}
if b.Available() <= 0 && b.Flush() != nil {
- return b.err
+ return b.err;
}
b.buf[b.n] = c;
b.n++;
- return nil
+ return nil;
}
// WriteString writes a string.
func (b *Writer) WriteString(s string) os.Error {
if b.err != nil {
- return b.err
+ return b.err;
}
// Common case, worth making fast.
if b.Available() >= len(s) || len(b.buf) >= len(s) && b.Flush() == nil {
@@ -480,7 +481,7 @@ func (b *Writer) WriteString(s string) os.Error {
for i := 0; i < len(s); i++ { // loop over bytes, not runes.
b.WriteByte(s[i]);
}
- return b.err
+ return b.err;
}
// buffered input and output
@@ -494,6 +495,5 @@ type ReadWriter struct {
// NewReadWriter allocates a new ReadWriter that dispatches to r and w.
func NewReadWriter(r *Reader, w *Writer) *ReadWriter {
- return &ReadWriter{r, w}
+ return &ReadWriter{r, w};
}
-
diff --git a/src/pkg/bytes/buffer.go b/src/pkg/bytes/buffer.go
index 0080d248b..7acddc4bc 100644
--- a/src/pkg/bytes/buffer.go
+++ b/src/pkg/bytes/buffer.go
@@ -38,19 +38,19 @@ type Buffer struct {
// Bytes returns the contents of the unread portion of the buffer;
// len(b.Bytes()) == b.Len().
func (b *Buffer) Bytes() []byte {
- return b.buf[b.off : len(b.buf)]
+ return b.buf[b.off : len(b.buf)];
}
// String returns the contents of the unread portion of the buffer
// as a string.
func (b *Buffer) String() string {
- return string(b.buf[b.off : len(b.buf)])
+ return string(b.buf[b.off : len(b.buf)]);
}
// Len returns the number of bytes of the unread portion of the buffer;
// b.Len() == len(b.Bytes()).
func (b *Buffer) Len() int {
- return len(b.buf) - b.off
+ return len(b.buf) - b.off;
}
// Truncate discards all but the first n unread bytes from the buffer.
@@ -75,21 +75,21 @@ func (b *Buffer) Write(p []byte) (n int, err os.Error) {
m := b.Len();
n = len(p);
- if len(b.buf) + n > cap(b.buf) {
+ if len(b.buf)+n > cap(b.buf) {
// not enough space at end
buf := b.buf;
- if m + n > cap(b.buf) {
+ if m+n > cap(b.buf) {
// not enough space anywhere
- buf = make([]byte, 2*cap(b.buf) + n)
+ buf = make([]byte, 2*cap(b.buf) + n);
}
- copyBytes(buf, 0, b.buf[b.off:b.off+m]);
+ copyBytes(buf, 0, b.buf[b.off : b.off + m]);
b.buf = buf;
- b.off = 0
+ b.off = 0;
}
b.buf = b.buf[0 : b.off + m + n];
copyBytes(b.buf, b.off + m, p);
- return n, nil
+ return n, nil;
}
// WriteString appends the contents of s to the buffer. The return
@@ -98,21 +98,21 @@ func (b *Buffer) WriteString(s string) (n int, err os.Error) {
m := b.Len();
n = len(s);
- if len(b.buf) + n > cap(b.buf) {
+ if len(b.buf)+n > cap(b.buf) {
// not enough space at end
buf := b.buf;
- if m + n > cap(b.buf) {
+ if m+n > cap(b.buf) {
// not enough space anywhere
- buf = make([]byte, 2*cap(b.buf) + n)
+ buf = make([]byte, 2*cap(b.buf) + n);
}
- copyBytes(buf, 0, b.buf[b.off:b.off+m]);
+ copyBytes(buf, 0, b.buf[b.off : b.off + m]);
b.buf = buf;
- b.off = 0
+ b.off = 0;
}
b.buf = b.buf[0 : b.off + m + n];
- copyString(b.buf, b.off+m, s);
- return n, nil
+ copyString(b.buf, b.off + m, s);
+ return n, nil;
}
// WriteByte appends the byte c to the buffer.
@@ -134,19 +134,19 @@ func (b *Buffer) WriteByte(c byte) os.Error {
// otherwise it is nil.
func (b *Buffer) Read(p []byte) (n int, err os.Error) {
if b.off >= len(b.buf) {
- return 0, os.EOF
+ return 0, os.EOF;
}
m := b.Len();
n = len(p);
if n > m {
// more bytes requested than available
- n = m
+ n = m;
}
- copyBytes(p, 0, b.buf[b.off:b.off+n]);
+ copyBytes(p, 0, b.buf[b.off : b.off + n]);
b.off += n;
- return n, err
+ return n, err;
}
// ReadByte reads and returns the next byte from the buffer.
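The Write and WriteString hunks above share one growth rule: append in place while it fits, otherwise slide the unread bytes (from b.off) to the front, and only reallocate, to 2*cap+n, when even that is not enough. A small sketch of that rule under hypothetical names (growFor is not part of the package):

	package main

	import "fmt"

	// growFor makes room for n more bytes after the unread region buf[off:],
	// mirroring the rule in the hunks above: reuse the slack at the front if
	// possible, and reallocate to 2*cap(buf)+n only as a last resort.
	func growFor(buf []byte, off, n int) ([]byte, int) {
		m := len(buf) - off // unread bytes
		if len(buf)+n > cap(buf) { // not enough space at end
			dst := buf
			if m+n > cap(buf) { // not enough space anywhere
				dst = make([]byte, 2*cap(buf)+n)
			}
			copy(dst, buf[off:]) // slide unread data to the front
			buf, off = dst[:m], 0
		}
		return buf[:off+m+n], off + m // new data goes at the returned index
	}

	func main() {
		buf := []byte("xxhello") // "xx" already read, "hello" unread
		buf, w := growFor(buf, 2, 6)
		copy(buf[w:], " world")
		fmt.Printf("%q cap=%d\n", buf, cap(buf))
	}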
diff --git a/src/pkg/bytes/buffer_test.go b/src/pkg/bytes/buffer_test.go
index d886790e7..1082e0a11 100644
--- a/src/pkg/bytes/buffer_test.go
+++ b/src/pkg/bytes/buffer_test.go
@@ -19,7 +19,7 @@ var bytes []byte // test data; same as data but as a slice.
func init() {
bytes = make([]byte, N);
for i := 0; i < N; i++ {
- bytes[i] = 'a' + byte(i%26);
+ bytes[i] = 'a'+byte(i%26);
}
data = string(bytes);
}
diff --git a/src/pkg/bytes/bytes_test.go b/src/pkg/bytes/bytes_test.go
index df4d4c2d0..094d71186 100644
--- a/src/pkg/bytes/bytes_test.go
+++ b/src/pkg/bytes/bytes_test.go
@@ -5,10 +5,10 @@
package bytes_test
import (
- . "bytes";
- "strings";
- "testing";
- "unicode";
+ . "bytes";
+ "strings";
+ "testing";
+ "unicode";
)
func eq(a, b []string) bool {
@@ -26,9 +26,9 @@ func eq(a, b []string) bool {
func arrayOfString(a [][]byte) []string {
result := make([]string, len(a));
for j := 0; j < len(a); j++ {
- result[j] = string(a[j])
+ result[j] = string(a[j]);
}
- return result
+ return result;
}
// For ease of reading, the test cases use strings that are converted to byte
@@ -40,21 +40,22 @@ var commas = "1,2,3,4"
var dots = "1....2....3....4"
type CompareTest struct {
- a string;
- b string;
- cmp int;
+ a string;
+ b string;
+ cmp int;
}
-var comparetests = []CompareTest {
- CompareTest{ "", "", 0 },
- CompareTest{ "a", "", 1 },
- CompareTest{ "", "a", -1 },
- CompareTest{ "abc", "abc", 0 },
- CompareTest{ "ab", "abc", -1 },
- CompareTest{ "abc", "ab", 1 },
- CompareTest{ "x", "ab", 1 },
- CompareTest{ "ab", "x", -1 },
- CompareTest{ "x", "a", 1 },
- CompareTest{ "b", "x", -1 },
+
+var comparetests = []CompareTest{
+ CompareTest{"", "", 0},
+ CompareTest{"a", "", 1},
+ CompareTest{"", "a", -1},
+ CompareTest{"abc", "abc", 0},
+ CompareTest{"ab", "abc", -1},
+ CompareTest{"abc", "ab", 1},
+ CompareTest{"x", "ab", 1},
+ CompareTest{"ab", "x", -1},
+ CompareTest{"x", "a", 1},
+ CompareTest{"b", "x", -1},
}
func TestCompare(t *testing.T) {
@@ -67,7 +68,7 @@ func TestCompare(t *testing.T) {
if cmp != tt.cmp {
t.Errorf(`Compare(%q, %q) = %v`, tt.a, tt.b, cmp);
}
- if eql != (tt.cmp==0) {
+ if eql != (tt.cmp == 0) {
t.Errorf(`Equal(%q, %q) = %v`, tt.a, tt.b, eql);
}
}
@@ -75,17 +76,19 @@ func TestCompare(t *testing.T) {
type ExplodeTest struct {
- s string;
- n int;
- a []string;
+ s string;
+ n int;
+ a []string;
}
-var explodetests = []ExplodeTest {
- ExplodeTest{ abcd, 0, []string{"a", "b", "c", "d"} },
- ExplodeTest{ faces, 0, []string{"☺", "☻", "☹"} },
- ExplodeTest{ abcd, 2, []string{"a", "bcd"} },
+
+var explodetests = []ExplodeTest{
+ ExplodeTest{abcd, 0, []string{"a", "b", "c", "d"}},
+ ExplodeTest{faces, 0, []string{"☺", "☻", "☹"}},
+ ExplodeTest{abcd, 2, []string{"a", "bcd"}},
}
+
func TestExplode(t *testing.T) {
- for _, tt := range(explodetests) {
+ for _, tt := range (explodetests) {
a := Split(strings.Bytes(tt.s), nil, tt.n);
result := arrayOfString(a);
if !eq(result, tt.a) {
@@ -101,25 +104,26 @@ func TestExplode(t *testing.T) {
type SplitTest struct {
- s string;
- sep string;
- n int;
- a []string;
+ s string;
+ sep string;
+ n int;
+ a []string;
}
-var splittests = []SplitTest {
- SplitTest{ abcd, "a", 0, []string{"", "bcd"} },
- SplitTest{ abcd, "z", 0, []string{"abcd"} },
- SplitTest{ abcd, "", 0, []string{"a", "b", "c", "d"} },
- SplitTest{ commas, ",", 0, []string{"1", "2", "3", "4"} },
- SplitTest{ dots, "...", 0, []string{"1", ".2", ".3", ".4"} },
- SplitTest{ faces, "☹", 0, []string{"☺☻", ""} },
- SplitTest{ faces, "~", 0, []string{faces} },
- SplitTest{ faces, "", 0, []string{"☺", "☻", "☹"} },
- SplitTest{ "1 2 3 4", " ", 3, []string{"1", "2", "3 4"} },
- SplitTest{ "1 2 3", " ", 3, []string{"1", "2", "3"} },
- SplitTest{ "1 2", " ", 3, []string{"1", "2"} },
- SplitTest{ "123", "", 2, []string{"1", "23"} },
- SplitTest{ "123", "", 17, []string{"1", "2", "3"} },
+
+var splittests = []SplitTest{
+ SplitTest{abcd, "a", 0, []string{"", "bcd"}},
+ SplitTest{abcd, "z", 0, []string{"abcd"}},
+ SplitTest{abcd, "", 0, []string{"a", "b", "c", "d"}},
+ SplitTest{commas, ",", 0, []string{"1", "2", "3", "4"}},
+ SplitTest{dots, "...", 0, []string{"1", ".2", ".3", ".4"}},
+ SplitTest{faces, "☹", 0, []string{"☺☻", ""}},
+ SplitTest{faces, "~", 0, []string{faces}},
+ SplitTest{faces, "", 0, []string{"☺", "☻", "☹"}},
+ SplitTest{"1 2 3 4", " ", 3, []string{"1", "2", "3 4"}},
+ SplitTest{"1 2 3", " ", 3, []string{"1", "2", "3"}},
+ SplitTest{"1 2", " ", 3, []string{"1", "2"}},
+ SplitTest{"123", "", 2, []string{"1", "23"}},
+ SplitTest{"123", "", 17, []string{"1", "2", "3"}},
}
func TestSplit(t *testing.T) {
@@ -143,14 +147,15 @@ type CopyTest struct {
n int;
res string;
}
-var copytests = []CopyTest {
- CopyTest{ "", "", 0, "" },
- CopyTest{ "a", "", 0, "a" },
- CopyTest{ "a", "a", 1, "a" },
- CopyTest{ "a", "b", 1, "b" },
- CopyTest{ "xyz", "abc", 3, "abc" },
- CopyTest{ "wxyz", "abc", 3, "abcz" },
- CopyTest{ "xyz", "abcd", 3, "abc" },
+
+var copytests = []CopyTest{
+ CopyTest{"", "", 0, ""},
+ CopyTest{"a", "", 0, "a"},
+ CopyTest{"a", "a", 1, "a"},
+ CopyTest{"a", "b", 1, "b"},
+ CopyTest{"xyz", "abc", 3, "abc"},
+ CopyTest{"wxyz", "abc", 3, "abcz"},
+ CopyTest{"xyz", "abcd", 3, "abc"},
}
func TestCopy(t *testing.T) {
@@ -172,7 +177,7 @@ type StringTest struct {
in, out string;
}
-var upperTests = []StringTest {
+var upperTests = []StringTest{
StringTest{"", ""},
StringTest{"abc", "ABC"},
StringTest{"AbC123", "ABC123"},
@@ -180,7 +185,7 @@ var upperTests = []StringTest {
StringTest{"\u0250\u0250\u0250\u0250\u0250", "\u2C6F\u2C6F\u2C6F\u2C6F\u2C6F"}, // grows one byte per char
}
-var lowerTests = []StringTest {
+var lowerTests = []StringTest{
StringTest{"", ""},
StringTest{"abc", "abc"},
StringTest{"AbC123", "abc123"},
@@ -190,10 +195,10 @@ var lowerTests = []StringTest {
const space = "\t\v\r\f\n\u0085\u00a0\u2000\u3000"
-var trimSpaceTests = []StringTest {
+var trimSpaceTests = []StringTest{
StringTest{"", ""},
StringTest{"abc", "abc"},
- StringTest{space + "abc" + space, "abc"},
+ StringTest{space+"abc"+space, "abc"},
StringTest{" ", ""},
StringTest{" \t\r\n \t\t\r\r\n\n ", ""},
StringTest{" \t\r\n x\t\t\r\r\n\n ", "x"},
@@ -227,23 +232,27 @@ func runStringTests(t *testing.T, f func([]byte) []byte, funcName string, testCa
func tenRunes(rune int) string {
r := make([]int, 10);
for i := range r {
- r[i] = rune
+ r[i] = rune;
}
- return string(r)
+ return string(r);
}
func TestMap(t *testing.T) {
// Run a couple of awful growth/shrinkage tests
a := tenRunes('a');
// 1. Grow. This triggers two reallocations in Map.
- maxRune := func(rune int) int { return unicode.MaxRune };
+ maxRune := func(rune int) int {
+ return unicode.MaxRune;
+ };
m := Map(maxRune, Bytes(a));
expect := tenRunes(unicode.MaxRune);
if string(m) != expect {
t.Errorf("growing: expected %q got %q", expect, m);
}
// 2. Shrink
- minRune := func(rune int) int { return 'a' };
+ minRune := func(rune int) int {
+ return 'a';
+ };
m = Map(minRune, Bytes(tenRunes(unicode.MaxRune)));
expect = a;
if string(m) != expect {
@@ -264,24 +273,25 @@ func TestTrimSpace(t *testing.T) {
}
type AddTest struct {
- s, t string;
+ s, t string;
cap int;
}
-var addtests = []AddTest {
- AddTest{ "", "", 0 },
- AddTest{ "a", "", 1 },
- AddTest{ "a", "b", 1 },
- AddTest{ "abc", "def", 100 },
+
+var addtests = []AddTest{
+ AddTest{"", "", 0},
+ AddTest{"a", "", 1},
+ AddTest{"a", "b", 1},
+ AddTest{"abc", "def", 100},
}
func TestAdd(t *testing.T) {
for _, test := range addtests {
b := make([]byte, len(test.s), test.cap);
for i := 0; i < len(test.s); i++ {
- b[i] = test.s[i]
+ b[i] = test.s[i];
}
b = Add(b, strings.Bytes(test.t));
- if string(b) != test.s+test.t {
+ if string(b) != test.s + test.t {
t.Errorf("Add(%q,%q) = %q", test.s, test.t, string(b));
}
}
@@ -291,7 +301,7 @@ func TestAddByte(t *testing.T) {
const N = 2e5;
b := make([]byte, 0);
for i := 0; i < N; i++ {
- b = AddByte(b, byte(i))
+ b = AddByte(b, byte(i));
}
if len(b) != N {
t.Errorf("AddByte: too small; expected %d got %d", N, len(b));
diff --git a/src/pkg/compress/flate/deflate.go b/src/pkg/compress/flate/deflate.go
index 548bff420..79b8ec3ba 100644
--- a/src/pkg/compress/flate/deflate.go
+++ b/src/pkg/compress/flate/deflate.go
@@ -12,67 +12,66 @@ import (
)
const (
- NoCompression = 0;
- BestSpeed = 1;
- fastCompression = 3;
- BestCompression = 9;
- DefaultCompression = -1;
-
- logMaxOffsetSize = 15; // Standard DEFLATE
- wideLogMaxOffsetSize = 22; // Wide DEFLATE
- minMatchLength = 3; // The smallest match that the deflater looks for
- maxMatchLength = 258; // The longest match for the deflater
- minOffsetSize = 1; // The shortest offset that makes any sence
+ NoCompression = 0;
+ BestSpeed = 1;
+ fastCompression = 3;
+ BestCompression = 9;
+ DefaultCompression = -1;
+ logMaxOffsetSize = 15; // Standard DEFLATE
+ wideLogMaxOffsetSize = 22; // Wide DEFLATE
+ minMatchLength = 3; // The smallest match that the deflater looks for
+ maxMatchLength = 258; // The longest match for the deflater
+ minOffsetSize = 1; // The shortest offset that makes any sence
// The maximum number of tokens we put into a single flat block, just too
// stop things from getting too large.
- maxFlateBlockTokens = 1 << 14;
- maxStoreBlockSize = 65535;
- hashBits = 15;
- hashSize = 1 << hashBits;
- hashMask = (1 << hashBits) - 1;
- hashShift = (hashBits + minMatchLength - 1) / minMatchLength;
+ maxFlateBlockTokens = 1<<14;
+ maxStoreBlockSize = 65535;
+ hashBits = 15;
+ hashSize = 1<<hashBits;
+ hashMask = (1<<hashBits)-1;
+ hashShift = (hashBits + minMatchLength - 1) / minMatchLength;
)
type syncPipeReader struct {
*io.PipeReader;
- closeChan chan bool;
+ closeChan chan bool;
}
func (sr *syncPipeReader) CloseWithError(err os.Error) os.Error {
retErr := sr.PipeReader.CloseWithError(err);
- sr.closeChan <- true; // finish writer close
+ sr.closeChan <- true; // finish writer close
return retErr;
}
type syncPipeWriter struct {
*io.PipeWriter;
- closeChan chan bool;
+ closeChan chan bool;
}
type compressionLevel struct {
good, lazy, nice, chain, fastSkipHashing int;
}
-var levels = [] compressionLevel {
- compressionLevel {}, // 0
+var levels = []compressionLevel{
+ compressionLevel{}, // 0
// For levels 1-3 we don't bother trying with lazy matches
- compressionLevel { 3, 0, 8, 4, 4, },
- compressionLevel { 3, 0, 16, 8, 5, },
- compressionLevel { 3, 0, 32, 32, 6 },
+ compressionLevel{3, 0, 8, 4, 4},
+ compressionLevel{3, 0, 16, 8, 5},
+ compressionLevel{3, 0, 32, 32, 6},
// Levels 4-9 use increasingly more lazy matching
// and increasingly stringent conditions for "good enough".
- compressionLevel { 4, 4, 16, 16, math.MaxInt32 },
- compressionLevel { 8, 16, 32, 32, math.MaxInt32 },
- compressionLevel { 8, 16, 128, 128, math.MaxInt32 },
- compressionLevel { 8, 32, 128, 256, math.MaxInt32 },
- compressionLevel { 32, 128, 258, 1024, math.MaxInt32 },
- compressionLevel { 32, 258, 258, 4096, math.MaxInt32 },
+ compressionLevel{4, 4, 16, 16, math.MaxInt32},
+ compressionLevel{8, 16, 32, 32, math.MaxInt32},
+ compressionLevel{8, 16, 128, 128, math.MaxInt32},
+ compressionLevel{8, 32, 128, 256, math.MaxInt32},
+ compressionLevel{32, 128, 258, 1024, math.MaxInt32},
+ compressionLevel{32, 258, 258, 4096, math.MaxInt32},
}
func (sw *syncPipeWriter) Close() os.Error {
err := sw.PipeWriter.Close();
- <-sw.closeChan; // wait for reader close
+ <-sw.closeChan; // wait for reader close
return err;
}
@@ -84,40 +83,40 @@ func syncPipe() (*syncPipeReader, *syncPipeWriter) {
}
type deflater struct {
- level int;
- logWindowSize uint;
- w *huffmanBitWriter;
- r io.Reader;
+ level int;
+ logWindowSize uint;
+ w *huffmanBitWriter;
+ r io.Reader;
// (1 << logWindowSize) - 1.
- windowMask int;
+ windowMask int;
// hashHead[hashValue] contains the largest inputIndex with the specified hash value
- hashHead []int;
+ hashHead []int;
// If hashHead[hashValue] is within the current window, then
// hashPrev[hashHead[hashValue] & windowMask] contains the previous index
// with the same hash value.
- hashPrev []int;
+ hashPrev []int;
// If we find a match of length >= niceMatch, then we don't bother searching
// any further.
- niceMatch int;
+ niceMatch int;
// If we find a match of length >= goodMatch, we only do a half-hearted
// effort at doing lazy matching starting at the next character
- goodMatch int;
+ goodMatch int;
// The maximum number of chains we look at when finding a match
- maxChainLength int;
+ maxChainLength int;
// The sliding window we use for matching
- window []byte;
+ window []byte;
// The index just past the last valid character
- windowEnd int;
+ windowEnd int;
// index in "window" at which current block starts
- blockStart int;
+ blockStart int;
}
func (d *deflater) flush() os.Error {
@@ -127,9 +126,9 @@ func (d *deflater) flush() os.Error {
func (d *deflater) fillWindow(index int) (int, os.Error) {
wSize := d.windowMask + 1;
- if index >= wSize + wSize - (minMatchLength + maxMatchLength) {
+ if index >= wSize+wSize-(minMatchLength + maxMatchLength) {
// shift the window by wSize
- bytes.Copy(d.window, d.window[wSize:2*wSize]);
+ bytes.Copy(d.window, d.window[wSize : 2*wSize]);
index -= wSize;
d.windowEnd -= wSize;
if d.blockStart >= wSize {
@@ -138,10 +137,10 @@ func (d *deflater) fillWindow(index int) (int, os.Error) {
d.blockStart = math.MaxInt32;
}
for i, h := range d.hashHead {
- d.hashHead[i] = max(h - wSize, -1);
+ d.hashHead[i] = max(h-wSize, -1);
}
for i, h := range d.hashPrev {
- d.hashPrev[i] = max(h - wSize, -1);
+ d.hashPrev[i] = max(h-wSize, -1);
}
}
var count int;
@@ -158,7 +157,7 @@ func (d *deflater) writeBlock(tokens []token, index int, eof bool) os.Error {
if index > 0 || eof {
var window []byte;
if d.blockStart <= index {
- window = d.window[d.blockStart:index];
+ window = d.window[d.blockStart : index];
}
d.blockStart = index;
d.w.writeBlock(tokens, eof, window);
@@ -170,10 +169,10 @@ func (d *deflater) writeBlock(tokens []token, index int, eof bool) os.Error {
// Try to find a match starting at index whose length is greater than prevSize.
// We only look at chainCount possibilities before giving up.
func (d *deflater) findMatch(pos int, prevHead int, prevLength int, lookahead int) (length, offset int, ok bool) {
- win := d.window[0:pos+min(maxMatchLength, lookahead)];
+ win := d.window[0 : pos + min(maxMatchLength, lookahead)];
// We quit when we get a match that's at least nice long
- nice := min(d.niceMatch, len(win) - pos);
+ nice := min(d.niceMatch, len(win)-pos);
// If we've got a match that's good enough, only look in 1/4 the chain.
tries := d.maxChainLength;
@@ -183,21 +182,21 @@ func (d *deflater) findMatch(pos int, prevHead int, prevLength int, lookahead in
}
w0 := win[pos];
- w1 := win[pos + 1];
- wEnd := win[pos + length];
- minIndex := pos - (d.windowMask + 1);
+ w1 := win[pos+1];
+ wEnd := win[pos+length];
+ minIndex := pos-(d.windowMask + 1);
for i := prevHead; tries > 0; tries-- {
if w0 == win[i] && w1 == win[i+1] && wEnd == win[i+length] {
// The hash function ensures that if win[i] and win[i+1] match, win[i+2] matches
n := 3;
- for pos + n < len(win) && win[i+n] == win[pos+n] {
+ for pos+n < len(win) && win[i+n] == win[pos+n] {
n++;
}
if n > length && (n > 3 || pos-i <= 4096) {
length = n;
- offset = pos - i;
+ offset = pos-i;
ok = true;
if n >= nice {
// The match is good enough that we don't try to find a better one.
@@ -246,7 +245,7 @@ func (d *deflater) storedDeflate() os.Error {
func (d *deflater) doDeflate() (err os.Error) {
// init
- d.windowMask = 1<<d.logWindowSize - 1;
+ d.windowMask = 1 << d.logWindowSize - 1;
d.hashHead = make([]int, hashSize);
d.hashPrev = make([]int, 1 << d.logWindowSize);
d.window = make([]byte, 2 << d.logWindowSize);
@@ -266,7 +265,7 @@ func (d *deflater) doDeflate() (err os.Error) {
if index, err = d.fillWindow(index); err != nil {
return;
}
- maxOffset := d.windowMask + 1; // (1 << logWindowSize);
+ maxOffset := d.windowMask + 1; // (1 << logWindowSize);
// only need to change when you refill the window
windowEnd := d.windowEnd;
maxInsertIndex := windowEnd - (minMatchLength - 1);
@@ -274,7 +273,7 @@ func (d *deflater) doDeflate() (err os.Error) {
hash := int(0);
if index < maxInsertIndex {
- hash = int(d.window[index])<<hashShift + int(d.window[index+1]);
+ hash = int(d.window[index]) << hashShift + int(d.window[index+1]);
}
chainHead := -1;
for {
@@ -298,7 +297,7 @@ func (d *deflater) doDeflate() (err os.Error) {
}
if index < maxInsertIndex {
// Update the hash
- hash = (hash<<hashShift + int(d.window[index+2])) & hashMask;
+ hash = (hash << hashShift + int(d.window[index+2]))&hashMask;
chainHead = d.hashHead[hash];
d.hashPrev[index & d.windowMask] = chainHead;
d.hashHead[hash] = index;
@@ -311,8 +310,8 @@ func (d *deflater) doDeflate() (err os.Error) {
if chainHead >= minIndex &&
(isFastDeflate && lookahead > minMatchLength - 1 ||
- !isFastDeflate && lookahead > prevLength && prevLength < lazyMatch) {
- if newLength, newOffset, ok := d.findMatch(index, chainHead, minMatchLength -1 , lookahead); ok {
+ !isFastDeflate && lookahead > prevLength && prevLength < lazyMatch) {
+ if newLength, newOffset, ok := d.findMatch(index, chainHead, minMatchLength - 1, lookahead); ok {
length = newLength;
offset = newOffset;
}
@@ -334,13 +333,13 @@ func (d *deflater) doDeflate() (err os.Error) {
if length <= l.fastSkipHashing {
var newIndex int;
if isFastDeflate {
- newIndex = index + length;
+ newIndex = index+length;
} else {
newIndex = prevLength - 1;
}
for index++; index < newIndex; index++ {
if index < maxInsertIndex {
- hash = (hash<<hashShift + int(d.window[index+2])) & hashMask;
+ hash = (hash << hashShift + int(d.window[index+2]))&hashMask;
// Get previous value with the same hash.
// Our chain should point to the previous value.
d.hashPrev[index & d.windowMask] = d.hashHead[hash];
@@ -356,7 +355,7 @@ func (d *deflater) doDeflate() (err os.Error) {
// For matches this long, we don't bother inserting each individual
// item into the table.
index += length;
- hash = (int(d.window[index])<<hashShift + int(d.window[index+1]));
+ hash = (int(d.window[index]) << hashShift + int(d.window[index+1]));
}
if ti == maxFlateBlockTokens {
// The block includes the current character
@@ -367,11 +366,11 @@ func (d *deflater) doDeflate() (err os.Error) {
}
} else {
if isFastDeflate || byteAvailable {
- i := index - 1;
+ i := index-1;
if isFastDeflate {
i = index;
}
- tokens[ti] = literalToken(uint32(d.window[i]) & 0xFF);
+ tokens[ti] = literalToken(uint32(d.window[i])&0xFF);
ti++;
if ti == maxFlateBlockTokens {
if err = d.writeBlock(tokens, i+1, false); err != nil {
@@ -389,7 +388,7 @@ func (d *deflater) doDeflate() (err os.Error) {
}
if byteAvailable {
// There is still one pending token that needs to be flushed
- tokens[ti] = literalToken(uint32(d.window[index - 1]) & 0xFF);
+ tokens[ti] = literalToken(uint32(d.window[index-1])&0xFF);
ti++;
}
@@ -416,7 +415,7 @@ func (d *deflater) deflater(r io.Reader, w io.Writer, level int, logWindowSize u
case 1 <= level && level <= 9:
err = d.doDeflate();
default:
- return WrongValueError { "level", 0, 9, int32(level) };
+ return WrongValueError{"level", 0, 9, int32(level)};
}
if err != nil {
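The deflater hunks above index the window by a 3-byte hash that is updated one byte at a time, hash = (hash<<hashShift + next) & hashMask, so consecutive positions share most of the work. A self-contained sketch of that rolling update using the constants from the diff (the hashAt helper and the example data are made up for illustration):

	package main

	import "fmt"

	const (
		minMatchLength = 3
		hashBits       = 15
		hashMask       = (1 << hashBits) - 1
		hashShift      = (hashBits + minMatchLength - 1) / minMatchLength // 5
	)

	// hashAt computes the same 3-byte hash the deflater keeps while scanning:
	// start from the first two bytes, then shift in the third and mask.
	func hashAt(window []byte, i int) int {
		h := int(window[i])<<hashShift + int(window[i+1])
		return (h<<hashShift + int(window[i+2])) & hashMask
	}

	func main() {
		data := []byte("abcabcabc")
		for i := 0; i+minMatchLength <= len(data); i++ {
			fmt.Printf("pos %d: hash %d\n", i, hashAt(data, i))
		}
	}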
diff --git a/src/pkg/compress/flate/deflate_test.go b/src/pkg/compress/flate/deflate_test.go
index 24042a278..3f6c60bf5 100644
--- a/src/pkg/compress/flate/deflate_test.go
+++ b/src/pkg/compress/flate/deflate_test.go
@@ -13,65 +13,66 @@ import (
)
type deflateTest struct {
- in []byte;
- level int;
- out []byte;
+ in []byte;
+ level int;
+ out []byte;
}
type deflateInflateTest struct {
- in [] byte;
+ in []byte;
}
type reverseBitsTest struct {
- in uint16;
- bitCount uint8;
- out uint16;
+ in uint16;
+ bitCount uint8;
+ out uint16;
}
-var deflateTests = []*deflateTest {
- &deflateTest { []byte{ }, 0, []byte{ 1, 0, 0, 255, 255 } },
- &deflateTest { []byte{ 0x11 }, -1, []byte{ 18, 4, 4, 0, 0, 255, 255 } },
- &deflateTest { []byte{ 0x11 }, DefaultCompression, []byte{ 18, 4, 4, 0, 0, 255, 255 } },
- &deflateTest { []byte{ 0x11 }, 4, []byte{ 18, 4, 4, 0, 0, 255, 255 } },
+var deflateTests = []*deflateTest{
+ &deflateTest{[]byte{}, 0, []byte{1, 0, 0, 255, 255}},
+ &deflateTest{[]byte{0x11}, -1, []byte{18, 4, 4, 0, 0, 255, 255}},
+ &deflateTest{[]byte{0x11}, DefaultCompression, []byte{18, 4, 4, 0, 0, 255, 255}},
+ &deflateTest{[]byte{0x11}, 4, []byte{18, 4, 4, 0, 0, 255, 255}},
- &deflateTest { []byte{ 0x11 }, 0, []byte { 0, 1, 0, 254, 255, 17, 1, 0, 0, 255, 255 } },
- &deflateTest { []byte{ 0x11, 0x12 }, 0, []byte{ 0, 2, 0, 253, 255, 17, 18, 1, 0, 0, 255, 255 } },
- &deflateTest { []byte{ 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11 }, 0,
- []byte{ 0, 8, 0, 247, 255, 17, 17, 17, 17, 17, 17, 17, 17, 1, 0, 0, 255, 255 } },
- &deflateTest { []byte{}, 1, []byte{ 1, 0, 0, 255, 255 } },
- &deflateTest { []byte{ 0x11 }, 1, []byte{ 18, 4, 4, 0, 0, 255, 255 } },
- &deflateTest { []byte{ 0x11, 0x12 }, 1, []byte{ 18, 20, 2, 4, 0, 0, 255, 255 } },
- &deflateTest { []byte{ 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11 }, 1, []byte{ 18, 132, 2, 64, 0, 0, 0, 255, 255 } },
- &deflateTest { []byte{}, 9, []byte{ 1, 0, 0, 255, 255 } },
- &deflateTest { []byte{ 0x11 }, 9, []byte{ 18, 4, 4, 0, 0, 255, 255 } },
- &deflateTest { []byte{ 0x11, 0x12 }, 9, []byte{ 18, 20, 2, 4, 0, 0, 255, 255 } },
- &deflateTest { []byte{ 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11 }, 9, []byte{ 18, 132, 2, 64, 0, 0, 0, 255, 255 } },
+ &deflateTest{[]byte{0x11}, 0, []byte{0, 1, 0, 254, 255, 17, 1, 0, 0, 255, 255}},
+ &deflateTest{[]byte{0x11, 0x12}, 0, []byte{0, 2, 0, 253, 255, 17, 18, 1, 0, 0, 255, 255}},
+ &deflateTest{[]byte{0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11}, 0,
+ []byte{0, 8, 0, 247, 255, 17, 17, 17, 17, 17, 17, 17, 17, 1, 0, 0, 255, 255},
+ },
+ &deflateTest{[]byte{}, 1, []byte{1, 0, 0, 255, 255}},
+ &deflateTest{[]byte{0x11}, 1, []byte{18, 4, 4, 0, 0, 255, 255}},
+ &deflateTest{[]byte{0x11, 0x12}, 1, []byte{18, 20, 2, 4, 0, 0, 255, 255}},
+ &deflateTest{[]byte{0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11}, 1, []byte{18, 132, 2, 64, 0, 0, 0, 255, 255}},
+ &deflateTest{[]byte{}, 9, []byte{1, 0, 0, 255, 255}},
+ &deflateTest{[]byte{0x11}, 9, []byte{18, 4, 4, 0, 0, 255, 255}},
+ &deflateTest{[]byte{0x11, 0x12}, 9, []byte{18, 20, 2, 4, 0, 0, 255, 255}},
+ &deflateTest{[]byte{0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11}, 9, []byte{18, 132, 2, 64, 0, 0, 0, 255, 255}},
}
-var deflateInflateTests = []*deflateInflateTest {
- &deflateInflateTest { []byte{ } },
- &deflateInflateTest { []byte{ 0x11 } },
- &deflateInflateTest { []byte{ 0x11, 0x12 } },
- &deflateInflateTest { []byte{ 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11 } },
- &deflateInflateTest { []byte{ 0x11, 0x10, 0x13, 0x41, 0x21, 0x21, 0x41, 0x13, 0x87, 0x78, 0x13 } },
- &deflateInflateTest { getLargeDataChunk() },
+var deflateInflateTests = []*deflateInflateTest{
+ &deflateInflateTest{[]byte{}},
+ &deflateInflateTest{[]byte{0x11}},
+ &deflateInflateTest{[]byte{0x11, 0x12}},
+ &deflateInflateTest{[]byte{0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11}},
+ &deflateInflateTest{[]byte{0x11, 0x10, 0x13, 0x41, 0x21, 0x21, 0x41, 0x13, 0x87, 0x78, 0x13}},
+ &deflateInflateTest{getLargeDataChunk()},
}
-var reverseBitsTests = []*reverseBitsTest {
- &reverseBitsTest { 1, 1, 1 },
- &reverseBitsTest { 1, 2, 2 },
- &reverseBitsTest { 1, 3, 4 },
- &reverseBitsTest { 1, 4, 8 },
- &reverseBitsTest { 1, 5, 16 },
- &reverseBitsTest { 17, 5, 17 },
- &reverseBitsTest { 257, 9, 257 },
- &reverseBitsTest { 29, 5, 23 },
+var reverseBitsTests = []*reverseBitsTest{
+ &reverseBitsTest{1, 1, 1},
+ &reverseBitsTest{1, 2, 2},
+ &reverseBitsTest{1, 3, 4},
+ &reverseBitsTest{1, 4, 8},
+ &reverseBitsTest{1, 5, 16},
+ &reverseBitsTest{17, 5, 17},
+ &reverseBitsTest{257, 9, 257},
+ &reverseBitsTest{29, 5, 23},
}
func getLargeDataChunk() []byte {
result := make([]byte, 100000);
for i := range result {
- result[i] = byte(int64(i) * int64(i) & 0xFF);
+ result[i] = byte(int64(i)*int64(i)&0xFF);
}
return result;
}
@@ -107,7 +108,7 @@ func testToFromWithLevel(t *testing.T, level int, input []byte, name string) os.
return nil;
}
-func testToFrom(t *testing.T, input[] byte, name string) {
+func testToFrom(t *testing.T, input []byte, name string) {
for i := 0; i < 10; i++ {
testToFromWithLevel(t, i, input, name);
}
@@ -134,130 +135,130 @@ func TestDeflateInflateString(t *testing.T) {
}
func getEdata() string {
- return "2.718281828459045235360287471352662497757247093699959574966967627724076630353547"+
- "59457138217852516642742746639193200305992181741359662904357290033429526059563073"+
- "81323286279434907632338298807531952510190115738341879307021540891499348841675092"+
- "44761460668082264800168477411853742345442437107539077744992069551702761838606261"+
- "33138458300075204493382656029760673711320070932870912744374704723069697720931014"+
- "16928368190255151086574637721112523897844250569536967707854499699679468644549059"+
- "87931636889230098793127736178215424999229576351482208269895193668033182528869398"+
- "49646510582093923982948879332036250944311730123819706841614039701983767932068328"+
- "23764648042953118023287825098194558153017567173613320698112509961818815930416903"+
- "51598888519345807273866738589422879228499892086805825749279610484198444363463244"+
- "96848756023362482704197862320900216099023530436994184914631409343173814364054625"+
- "31520961836908887070167683964243781405927145635490613031072085103837505101157477"+
- "04171898610687396965521267154688957035035402123407849819334321068170121005627880"+
- "23519303322474501585390473041995777709350366041699732972508868769664035557071622"+
- "68447162560798826517871341951246652010305921236677194325278675398558944896970964"+
- "09754591856956380236370162112047742722836489613422516445078182442352948636372141"+
- "74023889344124796357437026375529444833799801612549227850925778256209262264832627"+
- "79333865664816277251640191059004916449982893150566047258027786318641551956532442"+
- "58698294695930801915298721172556347546396447910145904090586298496791287406870504"+
- "89585867174798546677575732056812884592054133405392200011378630094556068816674001"+
- "69842055804033637953764520304024322566135278369511778838638744396625322498506549"+
- "95886234281899707733276171783928034946501434558897071942586398772754710962953741"+
- "52111513683506275260232648472870392076431005958411661205452970302364725492966693"+
- "81151373227536450988890313602057248176585118063036442812314965507047510254465011"+
- "72721155519486685080036853228183152196003735625279449515828418829478761085263981"+
- "39559900673764829224437528718462457803619298197139914756448826260390338144182326"+
- "25150974827987779964373089970388867782271383605772978824125611907176639465070633"+
- "04527954661855096666185664709711344474016070462621568071748187784437143698821855"+
- "96709591025968620023537185887485696522000503117343920732113908032936344797273559"+
- "55277349071783793421637012050054513263835440001863239914907054797780566978533580"+
- "48966906295119432473099587655236812859041383241160722602998330535370876138939639"+
- "17795745401613722361878936526053815584158718692553860616477983402543512843961294"+
- "60352913325942794904337299085731580290958631382683291477116396337092400316894586"+
- "36060645845925126994655724839186564209752685082307544254599376917041977780085362"+
- "73094171016343490769642372229435236612557250881477922315197477806056967253801718"+
- "07763603462459278778465850656050780844211529697521890874019660906651803516501792"+
- "50461950136658543663271254963990854914420001457476081930221206602433009641270489"+
- "43903971771951806990869986066365832322787093765022601492910115171776359446020232"+
- "49300280401867723910288097866605651183260043688508817157238669842242201024950551"+
- "88169480322100251542649463981287367765892768816359831247788652014117411091360116"+
- "49950766290779436460058519419985601626479076153210387275571269925182756879893027"+
- "61761146162549356495903798045838182323368612016243736569846703785853305275833337"+
- "93990752166069238053369887956513728559388349989470741618155012539706464817194670"+
- "83481972144888987906765037959036696724949925452790337296361626589760394985767413"+
- "97359441023744329709355477982629614591442936451428617158587339746791897571211956"+
- "18738578364475844842355558105002561149239151889309946342841393608038309166281881"+
- "15037152849670597416256282360921680751501777253874025642534708790891372917228286"+
- "11515915683725241630772254406337875931059826760944203261924285317018781772960235"+
- "41306067213604600038966109364709514141718577701418060644363681546444005331608778"+
- "31431744408119494229755993140118886833148328027065538330046932901157441475631399"+
- "97221703804617092894579096271662260740718749975359212756084414737823303270330168"+
- "23719364800217328573493594756433412994302485023573221459784328264142168487872167"+
- "33670106150942434569844018733128101079451272237378861260581656680537143961278887"+
- "32527373890392890506865324138062796025930387727697783792868409325365880733988457"+
- "21874602100531148335132385004782716937621800490479559795929059165547050577751430"+
- "81751126989851884087185640260353055837378324229241856256442550226721559802740126"+
- "17971928047139600689163828665277009752767069777036439260224372841840883251848770"+
- "47263844037953016690546593746161932384036389313136432713768884102681121989127522"+
- "30562567562547017250863497653672886059667527408686274079128565769963137897530346"+
- "60616669804218267724560530660773899624218340859882071864682623215080288286359746"+
- "83965435885668550377313129658797581050121491620765676995065971534476347032085321"+
- "56036748286083786568030730626576334697742956346437167093971930608769634953288468"+
- "33613038829431040800296873869117066666146800015121143442256023874474325250769387"+
- "07777519329994213727721125884360871583483562696166198057252661220679754062106208"+
- "06498829184543953015299820925030054982570433905535701686531205264956148572492573"+
- "86206917403695213533732531666345466588597286659451136441370331393672118569553952"+
- "10845840724432383558606310680696492485123263269951460359603729725319836842336390"+
- "46321367101161928217111502828016044880588023820319814930963695967358327420249882"+
- "45684941273860566491352526706046234450549227581151709314921879592718001940968866"+
- "98683703730220047531433818109270803001720593553052070070607223399946399057131158"+
- "70996357773590271962850611465148375262095653467132900259943976631145459026858989"+
- "79115837093419370441155121920117164880566945938131183843765620627846310490346293"+
- "95002945834116482411496975832601180073169943739350696629571241027323913874175492"+
- "30718624545432220395527352952402459038057445028922468862853365422138157221311632"+
- "88112052146489805180092024719391710555390113943316681515828843687606961102505171"+
- "00739276238555338627255353883096067164466237092264680967125406186950214317621166"+
- "81400975952814939072226011126811531083873176173232352636058381731510345957365382"+
- "23534992935822836851007810884634349983518404451704270189381994243410090575376257"+
- "76757111809008816418331920196262341628816652137471732547772778348877436651882875"+
- "21566857195063719365653903894493664217640031215278702223664636357555035655769488"+
- "86549500270853923617105502131147413744106134445544192101336172996285694899193369"+
- "18472947858072915608851039678195942983318648075608367955149663644896559294818785"+
- "17840387733262470519450504198477420141839477312028158868457072905440575106012852"+
- "58056594703046836344592652552137008068752009593453607316226118728173928074623094"+
- "68536782310609792159936001994623799343421068781349734695924646975250624695861690"+
- "91785739765951993929939955675427146549104568607020990126068187049841780791739240"+
- "71945996323060254707901774527513186809982284730860766536866855516467702911336827"+
- "56310722334672611370549079536583453863719623585631261838715677411873852772292259"+
- "47433737856955384562468010139057278710165129666367644518724656537304024436841408"+
- "14488732957847348490003019477888020460324660842875351848364959195082888323206522"+
- "12810419044804724794929134228495197002260131043006241071797150279343326340799596"+
- "05314460532304885289729176598760166678119379323724538572096075822771784833616135"+
- "82612896226118129455927462767137794487586753657544861407611931125958512655759734"+
- "57301533364263076798544338576171533346232527057200530398828949903425956623297578"+
- "24887350292591668258944568946559926584547626945287805165017206747854178879822768"+
- "06536650641910973434528878338621726156269582654478205672987756426325321594294418"+
- "03994321700009054265076309558846589517170914760743713689331946909098190450129030"+
- "70995662266203031826493657336984195557769637876249188528656866076005660256054457"+
- "11337286840205574416030837052312242587223438854123179481388550075689381124935386"+
- "31863528708379984569261998179452336408742959118074745341955142035172618420084550"+
- "91708456823682008977394558426792142734775608796442792027083121501564063413416171"+
- "66448069815483764491573900121217041547872591998943825364950514771379399147205219"+
- "52907939613762110723849429061635760459623125350606853765142311534966568371511660"+
- "42207963944666211632551577290709784731562782775987881364919512574833287937715714"+
- "59091064841642678309949723674420175862269402159407924480541255360431317992696739"+
- "15754241929660731239376354213923061787675395871143610408940996608947141834069836"+
- "29936753626215452472984642137528910798843813060955526227208375186298370667872244"+
- "30195793793786072107254277289071732854874374355781966511716618330881129120245204"+
- "04868220007234403502544820283425418788465360259150644527165770004452109773558589"+
- "76226554849416217149895323834216001140629507184904277892585527430352213968356790"+
- "18076406042138307308774460170842688272261177180842664333651780002171903449234264"+
- "26629226145600433738386833555534345300426481847398921562708609565062934040526494"+
- "32442614456659212912256488935696550091543064261342526684725949143142393988454324"+
- "86327461842846655985332312210466259890141712103446084271616619001257195870793217"+
- "56969854401339762209674945418540711844643394699016269835160784892451405894094639"+
- "52678073545797003070511636825194877011897640028276484141605872061841852971891540"+
- "19688253289309149665345753571427318482016384644832499037886069008072709327673127"+
- "58196656394114896171683298045513972950668760474091542042842999354102582911350224"+
- "16907694316685742425225090269390348148564513030699251995904363840284292674125734"+
- "22447765584177886171737265462085498294498946787350929581652632072258992368768457"+
- "01782303809656788311228930580914057261086588484587310165815116753332767488701482"+
- "91674197015125597825727074064318086014281490241467804723275976842696339357735429"+
- "30186739439716388611764209004068663398856841681003872389214483176070116684503887"+
- "21236436704331409115573328018297798873659091665961240202177855885487617616198937"+
- "07943800566633648843650891448055710397652146960276625835990519870423001794655367"+
- "9";
+ return "2.718281828459045235360287471352662497757247093699959574966967627724076630353547" +
+ "59457138217852516642742746639193200305992181741359662904357290033429526059563073" +
+ "81323286279434907632338298807531952510190115738341879307021540891499348841675092" +
+ "44761460668082264800168477411853742345442437107539077744992069551702761838606261" +
+ "33138458300075204493382656029760673711320070932870912744374704723069697720931014" +
+ "16928368190255151086574637721112523897844250569536967707854499699679468644549059" +
+ "87931636889230098793127736178215424999229576351482208269895193668033182528869398" +
+ "49646510582093923982948879332036250944311730123819706841614039701983767932068328" +
+ "23764648042953118023287825098194558153017567173613320698112509961818815930416903" +
+ "51598888519345807273866738589422879228499892086805825749279610484198444363463244" +
+ "96848756023362482704197862320900216099023530436994184914631409343173814364054625" +
+ "31520961836908887070167683964243781405927145635490613031072085103837505101157477" +
+ "04171898610687396965521267154688957035035402123407849819334321068170121005627880" +
+ "23519303322474501585390473041995777709350366041699732972508868769664035557071622" +
+ "68447162560798826517871341951246652010305921236677194325278675398558944896970964" +
+ "09754591856956380236370162112047742722836489613422516445078182442352948636372141" +
+ "74023889344124796357437026375529444833799801612549227850925778256209262264832627" +
+ "79333865664816277251640191059004916449982893150566047258027786318641551956532442" +
+ "58698294695930801915298721172556347546396447910145904090586298496791287406870504" +
+ "89585867174798546677575732056812884592054133405392200011378630094556068816674001" +
+ "69842055804033637953764520304024322566135278369511778838638744396625322498506549" +
+ "95886234281899707733276171783928034946501434558897071942586398772754710962953741" +
+ "52111513683506275260232648472870392076431005958411661205452970302364725492966693" +
+ "81151373227536450988890313602057248176585118063036442812314965507047510254465011" +
+ "72721155519486685080036853228183152196003735625279449515828418829478761085263981" +
+ "39559900673764829224437528718462457803619298197139914756448826260390338144182326" +
+ "25150974827987779964373089970388867782271383605772978824125611907176639465070633" +
+ "04527954661855096666185664709711344474016070462621568071748187784437143698821855" +
+ "96709591025968620023537185887485696522000503117343920732113908032936344797273559" +
+ "55277349071783793421637012050054513263835440001863239914907054797780566978533580" +
+ "48966906295119432473099587655236812859041383241160722602998330535370876138939639" +
+ "17795745401613722361878936526053815584158718692553860616477983402543512843961294" +
+ "60352913325942794904337299085731580290958631382683291477116396337092400316894586" +
+ "36060645845925126994655724839186564209752685082307544254599376917041977780085362" +
+ "73094171016343490769642372229435236612557250881477922315197477806056967253801718" +
+ "07763603462459278778465850656050780844211529697521890874019660906651803516501792" +
+ "50461950136658543663271254963990854914420001457476081930221206602433009641270489" +
+ "43903971771951806990869986066365832322787093765022601492910115171776359446020232" +
+ "49300280401867723910288097866605651183260043688508817157238669842242201024950551" +
+ "88169480322100251542649463981287367765892768816359831247788652014117411091360116" +
+ "49950766290779436460058519419985601626479076153210387275571269925182756879893027" +
+ "61761146162549356495903798045838182323368612016243736569846703785853305275833337" +
+ "93990752166069238053369887956513728559388349989470741618155012539706464817194670" +
+ "83481972144888987906765037959036696724949925452790337296361626589760394985767413" +
+ "97359441023744329709355477982629614591442936451428617158587339746791897571211956" +
+ "18738578364475844842355558105002561149239151889309946342841393608038309166281881" +
+ "15037152849670597416256282360921680751501777253874025642534708790891372917228286" +
+ "11515915683725241630772254406337875931059826760944203261924285317018781772960235" +
+ "41306067213604600038966109364709514141718577701418060644363681546444005331608778" +
+ "31431744408119494229755993140118886833148328027065538330046932901157441475631399" +
+ "97221703804617092894579096271662260740718749975359212756084414737823303270330168" +
+ "23719364800217328573493594756433412994302485023573221459784328264142168487872167" +
+ "33670106150942434569844018733128101079451272237378861260581656680537143961278887" +
+ "32527373890392890506865324138062796025930387727697783792868409325365880733988457" +
+ "21874602100531148335132385004782716937621800490479559795929059165547050577751430" +
+ "81751126989851884087185640260353055837378324229241856256442550226721559802740126" +
+ "17971928047139600689163828665277009752767069777036439260224372841840883251848770" +
+ "47263844037953016690546593746161932384036389313136432713768884102681121989127522" +
+ "30562567562547017250863497653672886059667527408686274079128565769963137897530346" +
+ "60616669804218267724560530660773899624218340859882071864682623215080288286359746" +
+ "83965435885668550377313129658797581050121491620765676995065971534476347032085321" +
+ "56036748286083786568030730626576334697742956346437167093971930608769634953288468" +
+ "33613038829431040800296873869117066666146800015121143442256023874474325250769387" +
+ "07777519329994213727721125884360871583483562696166198057252661220679754062106208" +
+ "06498829184543953015299820925030054982570433905535701686531205264956148572492573" +
+ "86206917403695213533732531666345466588597286659451136441370331393672118569553952" +
+ "10845840724432383558606310680696492485123263269951460359603729725319836842336390" +
+ "46321367101161928217111502828016044880588023820319814930963695967358327420249882" +
+ "45684941273860566491352526706046234450549227581151709314921879592718001940968866" +
+ "98683703730220047531433818109270803001720593553052070070607223399946399057131158" +
+ "70996357773590271962850611465148375262095653467132900259943976631145459026858989" +
+ "79115837093419370441155121920117164880566945938131183843765620627846310490346293" +
+ "95002945834116482411496975832601180073169943739350696629571241027323913874175492" +
+ "30718624545432220395527352952402459038057445028922468862853365422138157221311632" +
+ "88112052146489805180092024719391710555390113943316681515828843687606961102505171" +
+ "00739276238555338627255353883096067164466237092264680967125406186950214317621166" +
+ "81400975952814939072226011126811531083873176173232352636058381731510345957365382" +
+ "23534992935822836851007810884634349983518404451704270189381994243410090575376257" +
+ "76757111809008816418331920196262341628816652137471732547772778348877436651882875" +
+ "21566857195063719365653903894493664217640031215278702223664636357555035655769488" +
+ "86549500270853923617105502131147413744106134445544192101336172996285694899193369" +
+ "18472947858072915608851039678195942983318648075608367955149663644896559294818785" +
+ "17840387733262470519450504198477420141839477312028158868457072905440575106012852" +
+ "58056594703046836344592652552137008068752009593453607316226118728173928074623094" +
+ "68536782310609792159936001994623799343421068781349734695924646975250624695861690" +
+ "91785739765951993929939955675427146549104568607020990126068187049841780791739240" +
+ "71945996323060254707901774527513186809982284730860766536866855516467702911336827" +
+ "56310722334672611370549079536583453863719623585631261838715677411873852772292259" +
+ "47433737856955384562468010139057278710165129666367644518724656537304024436841408" +
+ "14488732957847348490003019477888020460324660842875351848364959195082888323206522" +
+ "12810419044804724794929134228495197002260131043006241071797150279343326340799596" +
+ "05314460532304885289729176598760166678119379323724538572096075822771784833616135" +
+ "82612896226118129455927462767137794487586753657544861407611931125958512655759734" +
+ "57301533364263076798544338576171533346232527057200530398828949903425956623297578" +
+ "24887350292591668258944568946559926584547626945287805165017206747854178879822768" +
+ "06536650641910973434528878338621726156269582654478205672987756426325321594294418" +
+ "03994321700009054265076309558846589517170914760743713689331946909098190450129030" +
+ "70995662266203031826493657336984195557769637876249188528656866076005660256054457" +
+ "11337286840205574416030837052312242587223438854123179481388550075689381124935386" +
+ "31863528708379984569261998179452336408742959118074745341955142035172618420084550" +
+ "91708456823682008977394558426792142734775608796442792027083121501564063413416171" +
+ "66448069815483764491573900121217041547872591998943825364950514771379399147205219" +
+ "52907939613762110723849429061635760459623125350606853765142311534966568371511660" +
+ "42207963944666211632551577290709784731562782775987881364919512574833287937715714" +
+ "59091064841642678309949723674420175862269402159407924480541255360431317992696739" +
+ "15754241929660731239376354213923061787675395871143610408940996608947141834069836" +
+ "29936753626215452472984642137528910798843813060955526227208375186298370667872244" +
+ "30195793793786072107254277289071732854874374355781966511716618330881129120245204" +
+ "04868220007234403502544820283425418788465360259150644527165770004452109773558589" +
+ "76226554849416217149895323834216001140629507184904277892585527430352213968356790" +
+ "18076406042138307308774460170842688272261177180842664333651780002171903449234264" +
+ "26629226145600433738386833555534345300426481847398921562708609565062934040526494" +
+ "32442614456659212912256488935696550091543064261342526684725949143142393988454324" +
+ "86327461842846655985332312210466259890141712103446084271616619001257195870793217" +
+ "56969854401339762209674945418540711844643394699016269835160784892451405894094639" +
+ "52678073545797003070511636825194877011897640028276484141605872061841852971891540" +
+ "19688253289309149665345753571427318482016384644832499037886069008072709327673127" +
+ "58196656394114896171683298045513972950668760474091542042842999354102582911350224" +
+ "16907694316685742425225090269390348148564513030699251995904363840284292674125734" +
+ "22447765584177886171737265462085498294498946787350929581652632072258992368768457" +
+ "01782303809656788311228930580914057261086588484587310165815116753332767488701482" +
+ "91674197015125597825727074064318086014281490241467804723275976842696339357735429" +
+ "30186739439716388611764209004068663398856841681003872389214483176070116684503887" +
+ "21236436704331409115573328018297798873659091665961240202177855885487617616198937" +
+ "07943800566633648843650891448055710397652146960276625835990519870423001794655367" +
+ "9";
}
diff --git a/src/pkg/compress/flate/flate_test.go b/src/pkg/compress/flate/flate_test.go
index ba5b0235e..ab7ce250b 100644
--- a/src/pkg/compress/flate/flate_test.go
+++ b/src/pkg/compress/flate/flate_test.go
@@ -56,8 +56,8 @@ var initDecoderTests = []*InitDecoderTest{
[]int{3, 5, 2, 4, 3, 5, 5, 4, 4, 3, 4, 5},
huffmanDecoder{
2, 5,
- [maxCodeLen+1]int{2: 0, 4, 13, 31},
- [maxCodeLen+1]int{2: 0, 1, 6, 20},
+ [maxCodeLen + 1]int{2: 0, 4, 13, 31},
+ [maxCodeLen + 1]int{2: 0, 1, 6, 20},
// Paper used different code assignment:
// 2, 9, 4, 0, 10, 8, 3, 7, 1, 5, 11, 6
// Reordered here so that codes of same length
@@ -72,8 +72,8 @@ var initDecoderTests = []*InitDecoderTest{
[]int{2, 1, 3, 3},
huffmanDecoder{
1, 3,
- [maxCodeLen+1]int{1: 0, 2, 7},
- [maxCodeLen+1]int{1: 0, 1, 4},
+ [maxCodeLen + 1]int{1: 0, 2, 7},
+ [maxCodeLen + 1]int{1: 0, 1, 4},
[]int{1, 0, 2, 3},
},
true,
@@ -84,8 +84,8 @@ var initDecoderTests = []*InitDecoderTest{
[]int{3, 3, 3, 3, 3, 2, 4, 4},
huffmanDecoder{
2, 4,
- [maxCodeLen+1]int{2: 0, 6, 15},
- [maxCodeLen+1]int{2: 0, 1, 8},
+ [maxCodeLen + 1]int{2: 0, 6, 15},
+ [maxCodeLen + 1]int{2: 0, 1, 8},
[]int{5, 0, 1, 2, 3, 4, 6, 7},
},
true,
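
In these decoder tables, limit[n] is the largest canonical code word of length n and base[n] is the smallest code word of length n minus the number of shorter codes, so a code v of length n maps to codes[v-base[n]]. A standalone sketch (not part of the package; names are illustrative) that rebuilds the two arrays for the first test case from its code lengths:

package main

import "fmt"

// buildTables assigns canonical Huffman codes to the given per-symbol code
// lengths and returns, for every length n, the largest code word (limit[n])
// and the smallest code word minus the count of shorter codes (base[n]).
func buildTables(lengths []int, maxLen int) (limit, base []int) {
	count := make([]int, maxLen+1)
	for _, n := range lengths {
		count[n]++
	}
	limit = make([]int, maxLen+1)
	base = make([]int, maxLen+1)
	code, seq := 0, 0
	for n := 1; n <= maxLen; n++ {
		code <<= 1 // first candidate code word of length n
		if count[n] == 0 {
			continue
		}
		base[n] = code - seq
		limit[n] = code + count[n] - 1
		code += count[n]
		seq += count[n]
	}
	return limit, base
}

func main() {
	limit, base := buildTables([]int{3, 5, 2, 4, 3, 5, 5, 4, 4, 3, 4, 5}, 5)
	fmt.Println(limit) // [0 0 0 4 13 31]
	fmt.Println(base)  // [0 0 0 1 6 20]
}
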
diff --git a/src/pkg/compress/flate/huffman_bit_writer.go b/src/pkg/compress/flate/huffman_bit_writer.go
index dbf59f2ae..74c21bd2f 100644
--- a/src/pkg/compress/flate/huffman_bit_writer.go
+++ b/src/pkg/compress/flate/huffman_bit_writer.go
@@ -13,48 +13,47 @@ import (
const (
// The largest offset code.
- offsetCodeCount = 30;
+ offsetCodeCount = 30;
// The largest offset code in the extensions.
- extendedOffsetCodeCount = 42;
+ extendedOffsetCodeCount = 42;
// The special code used to mark the end of a block.
- endBlockMarker = 256;
+ endBlockMarker = 256;
// The first length code.
- lengthCodesStart = 257;
+ lengthCodesStart = 257;
// The number of codegen codes.
- codegenCodeCount = 19;
-
- badCode = 255;
+ codegenCodeCount = 19;
+ badCode = 255;
)
// The number of extra bits needed by length code X - LENGTH_CODES_START.
-var lengthExtraBits = []int8 {
- /* 257 */ 0, 0, 0,
- /* 260 */ 0, 0, 0, 0, 0, 1, 1, 1, 1, 2,
- /* 270 */ 2, 2, 2, 3, 3, 3, 3, 4, 4, 4,
- /* 280 */ 4, 5, 5, 5, 5, 0,
+var lengthExtraBits = []int8{
+ /* 257 */0, 0, 0,
+ /* 260 */0, 0, 0, 0, 0, 1, 1, 1, 1, 2,
+ /* 270 */2, 2, 2, 3, 3, 3, 3, 4, 4, 4,
+ /* 280 */4, 5, 5, 5, 5, 0,
}
// The length indicated by length code X - LENGTH_CODES_START.
-var lengthBase = []uint32 {
- 0, 1, 2, 3, 4, 5, 6, 7, 8, 10,
- 12, 14, 16, 20, 24, 28, 32, 40, 48, 56,
- 64, 80, 96, 112, 128, 160, 192, 224, 255
+var lengthBase = []uint32{
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 10,
+ 12, 14, 16, 20, 24, 28, 32, 40, 48, 56,
+ 64, 80, 96, 112, 128, 160, 192, 224, 255,
}
// offset code word extra bits.
-var offsetExtraBits = []int8 {
- 0, 0, 0, 0, 1, 1, 2, 2, 3, 3,
- 4, 4, 5, 5, 6, 6, 7, 7, 8, 8,
- 9, 9, 10, 10, 11, 11, 12, 12, 13, 13,
+var offsetExtraBits = []int8{
+ 0, 0, 0, 0, 1, 1, 2, 2, 3, 3,
+ 4, 4, 5, 5, 6, 6, 7, 7, 8, 8,
+ 9, 9, 10, 10, 11, 11, 12, 12, 13, 13,
/* extended window */
14, 14, 15, 15, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20,
}
-var offsetBase = []uint32 {
+var offsetBase = []uint32{
/* normal deflate */
0x000000, 0x000001, 0x000002, 0x000003, 0x000004,
0x000006, 0x000008, 0x00000c, 0x000010, 0x000018,
@@ -66,37 +65,35 @@ var offsetBase = []uint32 {
/* extended window */
0x008000, 0x00c000, 0x010000, 0x018000, 0x020000,
0x030000, 0x040000, 0x060000, 0x080000, 0x0c0000,
- 0x100000, 0x180000, 0x200000, 0x300000
+ 0x100000, 0x180000, 0x200000, 0x300000,
}
// The odd order in which the codegen code sizes are written.
-var codegenOrder = []uint32 {
- 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15
-}
+var codegenOrder = []uint32{16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15}
type huffmanBitWriter struct {
- w io.Writer;
+ w io.Writer;
// Data waiting to be written is bytes[0:nbytes]
// and then the low nbits of bits.
- bits uint32;
- nbits uint32;
- bytes [64]byte;
- nbytes int;
- literalFreq []int32;
- offsetFreq []int32;
- codegen []uint8;
- codegenFreq []int32;
- literalEncoding *huffmanEncoder;
- offsetEncoding *huffmanEncoder;
- codegenEncoding *huffmanEncoder;
- err os.Error;
+ bits uint32;
+ nbits uint32;
+ bytes [64]byte;
+ nbytes int;
+ literalFreq []int32;
+ offsetFreq []int32;
+ codegen []uint8;
+ codegenFreq []int32;
+ literalEncoding *huffmanEncoder;
+ offsetEncoding *huffmanEncoder;
+ codegenEncoding *huffmanEncoder;
+ err os.Error;
}
type WrongValueError struct {
- name string;
- from int32;
- to int32;
- value int32;
+ name string;
+ from int32;
+ to int32;
+ value int32;
}
func newHuffmanBitWriter(w io.Writer) *huffmanBitWriter {
@@ -175,7 +172,7 @@ func (w *huffmanBitWriter) writeBytes(bytes []byte) {
n++;
}
if w.nbits != 0 {
- w.err = InternalError("writeBytes with unfinished bits");
+ w.err = InternalError("writeBytes with unfinished bits");
return;
}
if n != 0 {
@@ -205,7 +202,7 @@ func (w *huffmanBitWriter) generateCodegen(numLiterals int, numOffsets int) {
// a copy of the frequencies, and as the place where we put the result.
// This is fine because the output is always shorter than the input used
// so far.
- codegen := w.codegen; // cache
+ codegen := w.codegen; // cache
// Copy the concatenated code sizes to codegen. Put a marker at the end.
copyUint8s(codegen[0 : numLiterals], w.literalEncoding.codeBits);
copyUint8s(codegen[numLiterals : numLiterals + numOffsets], w.offsetEncoding.codeBits);
@@ -232,7 +229,7 @@ func (w *huffmanBitWriter) generateCodegen(numLiterals int, numOffsets int) {
n := min(count, 6);
codegen[outIndex] = 16;
outIndex++;
- codegen[outIndex] = uint8(n - 3);
+ codegen[outIndex] = uint8(n-3);
outIndex++;
w.codegenFreq[16]++;
count -= n;
@@ -242,7 +239,7 @@ func (w *huffmanBitWriter) generateCodegen(numLiterals int, numOffsets int) {
n := min(count, 138);
codegen[outIndex] = 18;
outIndex++;
- codegen[outIndex] = uint8(n - 11);
+ codegen[outIndex] = uint8(n-11);
outIndex++;
w.codegenFreq[18]++;
count -= n;
@@ -251,7 +248,7 @@ func (w *huffmanBitWriter) generateCodegen(numLiterals int, numOffsets int) {
// count >= 3 && count <= 10
codegen[outIndex] = 17;
outIndex++;
- codegen[outIndex] = uint8(count - 3);
+ codegen[outIndex] = uint8(count-3);
outIndex++;
w.codegenFreq[17]++;
count = 0;
@@ -295,8 +292,8 @@ func (w *huffmanBitWriter) writeDynamicHeader(numLiterals int, numOffsets int, n
w.writeBits(int32(numLiterals - 257), 5);
if numOffsets > offsetCodeCount {
// Extended version of deflater
- w.writeBits(int32(offsetCodeCount + ((numOffsets - (1 + offsetCodeCount)) >> 3)), 5);
- w.writeBits(int32((numOffsets - (1 + offsetCodeCount)) & 0x7), 3);
+ w.writeBits(int32(offsetCodeCount + ((numOffsets - (1 + offsetCodeCount))>>3)), 5);
+ w.writeBits(int32((numOffsets - (1 + offsetCodeCount))&0x7), 3);
} else {
w.writeBits(int32(numOffsets - 1), 5);
}
@@ -368,10 +365,10 @@ func (w *huffmanBitWriter) writeBlock(tokens []token, eof bool, input []byte) {
fillInt32s(w.offsetFreq, 0);
n := len(tokens);
- tokens = tokens[0:n+1];
+ tokens = tokens[0 : n+1];
tokens[n] = endBlockMarker;
- totalLength := -1; // Subtract 1 for endBlock.
+ totalLength := -1; // Subtract 1 for endBlock.
for _, t := range tokens {
switch t.typ() {
case literalType:
@@ -381,7 +378,7 @@ func (w *huffmanBitWriter) writeBlock(tokens []token, eof bool, input []byte) {
case matchType:
length := t.length();
offset := t.offset();
- totalLength += int(length + 3);
+ totalLength += int(length+3);
w.literalFreq[lengthCodesStart + lengthCode(length)]++;
w.offsetFreq[offsetCode(offset)]++;
break;
@@ -407,18 +404,18 @@ func (w *huffmanBitWriter) writeBlock(tokens []token, eof bool, input []byte) {
var extraBits int64;
var storedSize int64;
if storedBytes <= maxStoreBlockSize && input != nil {
- storedSize = int64((storedBytes + 5) * 8);
+ storedSize = int64((storedBytes + 5)*8);
// We only bother calculating the costs of the extra bits required by
// the length or offset fields (which will be the same for both fixed
// and dynamic encoding), if we need to compare those two encodings
// against stored encoding.
for lengthCode := lengthCodesStart + 8; lengthCode < numLiterals; lengthCode++ {
// First eight length codes have extra size = 0.
- extraBits += int64(w.literalFreq[lengthCode]) * int64(lengthExtraBits[lengthCode - lengthCodesStart]);
+ extraBits += int64(w.literalFreq[lengthCode])*int64(lengthExtraBits[lengthCode - lengthCodesStart]);
}
for offsetCode := 4; offsetCode < numOffsets; offsetCode++ {
// First four offset codes have extra size = 0.
- extraBits += int64(w.offsetFreq[offsetCode]) * int64(offsetExtraBits[offsetCode]);
+ extraBits += int64(w.offsetFreq[offsetCode])*int64(offsetExtraBits[offsetCode]);
}
} else {
storedSize = math.MaxInt32;
@@ -445,7 +442,7 @@ func (w *huffmanBitWriter) writeBlock(tokens []token, eof bool, input []byte) {
if numOffsets > offsetCodeCount {
extensionSummand = 3;
}
- dynamicHeader := int64(3 + 5 + 5 + 4 + (3 * numCodegens)) +
+ dynamicHeader := int64(3+5+5+4+(3 * numCodegens)) +
// Following line is an extension.
int64(extensionSummand) +
w.codegenEncoding.bitLength(w.codegenFreq) +
@@ -459,7 +456,7 @@ func (w *huffmanBitWriter) writeBlock(tokens []token, eof bool, input []byte) {
if storedSize < fixedSize && storedSize < dynamicSize {
w.writeStoredHeader(storedBytes, eof);
- w.writeBytes(input[0:storedBytes]);
+ w.writeBytes(input[0 : storedBytes]);
return;
}
var literalEncoding *huffmanEncoder;
@@ -507,4 +504,3 @@ func (w *huffmanBitWriter) writeBlock(tokens []token, eof bool, input []byte) {
}
}
}
-
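
generateCodegen, reflowed above, run-length encodes the literal and offset code lengths with the three special codegen symbols of RFC 1951: 16 repeats the previous length 3-6 times (its extra value is n-3), 17 emits 3-10 zero lengths (count-3), and 18 emits 11-138 zero lengths (n-11), which is why the hunks write uint8(n-3), uint8(n-11) and uint8(count-3). A small standalone decoder for that encoding, for illustration only, with the extra values kept inline rather than in a bit stream:

package main

import "fmt"

// expand decodes a codegen sequence back into individual code lengths.
// Symbols 0-15 are literal lengths; 16, 17 and 18 are the run-length
// codes described above, each followed here by its extra-bits value.
func expand(codegen []int) []int {
	var out []int
	for i := 0; i < len(codegen); i++ {
		switch c := codegen[i]; {
		case c < 16:
			out = append(out, c)
		case c == 16: // repeat the previous length 3-6 times
			i++
			prev := out[len(out)-1] // assumes a previous length exists
			for j := 0; j < codegen[i]+3; j++ {
				out = append(out, prev)
			}
		case c == 17: // 3-10 zero lengths
			i++
			for j := 0; j < codegen[i]+3; j++ {
				out = append(out, 0)
			}
		default: // 18: 11-138 zero lengths
			i++
			for j := 0; j < codegen[i]+11; j++ {
				out = append(out, 0)
			}
		}
	}
	return out
}

func main() {
	// 5, then 5 repeated 3 more times, then 13 zeros, then 7.
	fmt.Println(expand([]int{5, 16, 0, 18, 2, 7}))
}
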
diff --git a/src/pkg/compress/flate/inflate.go b/src/pkg/compress/flate/inflate.go
index 4c4780156..302cbd376 100644
--- a/src/pkg/compress/flate/inflate.go
+++ b/src/pkg/compress/flate/inflate.go
@@ -66,10 +66,10 @@ type huffmanDecoder struct {
// limit[i] = largest code word of length i
// Given code v of length n,
// need more bits if v > limit[n].
- limit [maxCodeLen+1]int;
+ limit [maxCodeLen + 1]int;
// base[i] = smallest code word of length i - seq number
- base [maxCodeLen+1]int;
+ base [maxCodeLen + 1]int;
// codes[seq number] = output code.
// Given code v of length n, value is
@@ -83,7 +83,7 @@ func (h *huffmanDecoder) init(bits []int) bool {
// Count number of codes of each length,
// compute min and max length.
- var count [maxCodeLen+1]int;
+ var count [maxCodeLen + 1]int;
var min, max int;
for _, n := range bits {
if n == 0 {
@@ -142,8 +142,8 @@ func (h *huffmanDecoder) init(bits []int) bool {
// See RFC 1951, section 3.2.6.
var fixedHuffmanDecoder = huffmanDecoder{
7, 9,
- [maxCodeLen+1]int{7: 23, 199, 511},
- [maxCodeLen+1]int{7: 0, 24, 224},
+ [maxCodeLen + 1]int{7: 23, 199, 511},
+ [maxCodeLen + 1]int{7: 0, 24, 224},
[]int{
// length 7: 256-279
256, 257, 258, 259, 260, 261, 262,
@@ -271,11 +271,11 @@ func (f *inflater) readHuffman() os.Error {
return err;
}
}
- nlit := int(f.b & 0x1F) + 257;
+ nlit := int(f.b & 0x1F)+257;
f.b >>= 5;
- ndist := int(f.b & 0x1F) + 1;
+ ndist := int(f.b & 0x1F)+1;
f.b >>= 5;
- nclen := int(f.b & 0xF) + 4;
+ nclen := int(f.b & 0xF)+4;
f.b >>= 4;
f.nb -= 5+5+4;
@@ -437,7 +437,7 @@ func (f *inflater) decodeBlock(hl, hd *huffmanDecoder) os.Error {
case dist >= 30:
return CorruptInputError(f.roffset);
default:
- nb := uint(dist-2) >> 1;
+ nb := uint(dist-2)>>1;
// have 1 bit in bottom of dist, need nb more.
extra := (dist&1)<<nb;
for f.nb < nb {
@@ -495,8 +495,8 @@ func (f *inflater) dataBlock() os.Error {
if err != nil {
return &ReadError{f.roffset, err};
}
- n := int(f.buf[0]) | int(f.buf[1]) << 8;
- nn := int(f.buf[2]) | int(f.buf[3]) << 8;
+ n := int(f.buf[0]) | int(f.buf[1])<<8;
+ nn := int(f.buf[2]) | int(f.buf[3])<<8;
if uint16(nn) != uint16(^n) {
return CorruptInputError(f.roffset);
}
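
The dataBlock change above is only spacing, but the check it touches is worth spelling out: a stored (uncompressed) block begins with a little-endian 16-bit LEN followed by NLEN, its one's complement, and the decoder reports CorruptInputError when they disagree (RFC 1951, section 3.2.4). A self-contained sketch of that header check, with an illustrative function name:

package main

import "fmt"

// storedLen extracts LEN from a stored-block header and verifies that
// NLEN is its one's complement, as RFC 1951 requires.
func storedLen(hdr [4]byte) (int, bool) {
	n := int(hdr[0]) | int(hdr[1])<<8  // LEN, little-endian
	nn := int(hdr[2]) | int(hdr[3])<<8 // NLEN, little-endian
	return n, uint16(nn) == uint16(^n)
}

func main() {
	fmt.Println(storedLen([4]byte{0x0c, 0x00, 0xf3, 0xff})) // 12 true
	fmt.Println(storedLen([4]byte{0x0c, 0x00, 0xf3, 0xfe})) // 12 false
}
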
diff --git a/src/pkg/compress/flate/reverse_bits.go b/src/pkg/compress/flate/reverse_bits.go
index 7274541d0..76693d475 100644
--- a/src/pkg/compress/flate/reverse_bits.go
+++ b/src/pkg/compress/flate/reverse_bits.go
@@ -44,5 +44,5 @@ func reverseUint16(v uint16) uint16 {
}
func reverseBits(number uint16, bitLength byte) uint16 {
- return reverseUint16(number << uint8(16-bitLength));
+ return reverseUint16(number<<uint8(16 - bitLength));
}
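
reverseBits exists because the bit writer emits bits least-significant first while Huffman code words must appear most-significant bit first, so each code is bit-reversed before being written; the package does this with a byte lookup table, but the effect is simply a reversal of the low bitLength bits. A loop version, for illustration only:

package main

import "fmt"

// reverse returns the low n bits of v in reversed order -- the operation
// the table-driven reverseBits above performs.
func reverse(v uint16, n uint) uint16 {
	var r uint16
	for i := uint(0); i < n; i++ {
		r = r<<1 | (v>>i)&1
	}
	return r
}

func main() {
	fmt.Printf("%03b\n", reverse(6, 3)) // 110 reversed: 011
}
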
diff --git a/src/pkg/compress/flate/token.go b/src/pkg/compress/flate/token.go
index daa23da55..476eae783 100644
--- a/src/pkg/compress/flate/token.go
+++ b/src/pkg/compress/flate/token.go
@@ -9,7 +9,7 @@ const (
// 8 bits: xlength = length - MIN_MATCH_LENGTH
// 22 bits xoffset = offset - MIN_OFFSET_SIZE, or literal
lengthShift = 22;
- offsetMask = 1<<lengthShift - 1;
+ offsetMask = 1 << lengthShift - 1;
typeMask = 3<<30;
literalType = 0<<30;
matchType = 1<<30;
@@ -69,12 +69,12 @@ type token uint32
// Convert a literal into a literal token.
func literalToken(literal uint32) token {
- return token(literalType+literal);
+ return token(literalType + literal);
}
// Convert a < xlength, xoffset > pair into a match token.
func matchToken(xlength uint32, xoffset uint32) token {
- return token(matchType + xlength<<lengthShift + xoffset);
+ return token(matchType + xlength << lengthShift + xoffset);
}
// Returns the type of a token
@@ -84,16 +84,16 @@ func (t token) typ() uint32 {
// Returns the literal of a literal token
func (t token) literal() uint32 {
- return uint32(t-literalType);
+ return uint32(t - literalType);
}
// Returns the extra offset of a match token
func (t token) offset() uint32 {
- return uint32(t)&offsetMask;
+ return uint32(t) & offsetMask;
}
func (t token) length() uint32 {
- return uint32((t-matchType)>>lengthShift);
+ return uint32((t - matchType) >> lengthShift);
}
func lengthCode(len uint32) uint32 {
@@ -107,9 +107,9 @@ func offsetCode(off uint32) uint32 {
case off < n:
return offsetCodes[off];
case off>>7 < n:
- return offsetCodes[off>>7]+14;
+ return offsetCodes[off>>7] + 14;
default:
- return offsetCodes[off>>14]+28;
+ return offsetCodes[off>>14] + 28;
}
panic("unreachable");
}
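
The token.go hunks only adjust spacing around the packing arithmetic, which is: one uint32 per token, the type in the top two bits, the extra length above bit 22, and the extra offset (or the literal byte) in the low 22 bits. A standalone sketch of that scheme using the constants and functions visible above:

package main

import "fmt"

const (
	lengthShift = 22
	offsetMask  = 1<<lengthShift - 1
	typeMask    = 3 << 30
	literalType = 0 << 30
	matchType   = 1 << 30
)

type token uint32

// literalToken packs a literal byte value into a token.
func literalToken(literal uint32) token { return token(literalType + literal) }

// matchToken packs an (xlength, xoffset) pair into a token.
func matchToken(xlength, xoffset uint32) token {
	return token(matchType + xlength<<lengthShift + xoffset)
}

func main() {
	t := matchToken(5, 300)
	fmt.Println(uint32(t)&typeMask == matchType)    // true: a match token
	fmt.Println(uint32(t-matchType) >> lengthShift) // 5: recovered xlength
	fmt.Println(uint32(t) & offsetMask)             // 300: recovered xoffset
}
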
diff --git a/src/pkg/compress/gzip/gunzip.go b/src/pkg/compress/gzip/gunzip.go
index c13184d72..07906cd38 100644
--- a/src/pkg/compress/gzip/gunzip.go
+++ b/src/pkg/compress/gzip/gunzip.go
@@ -110,7 +110,7 @@ func (z *Inflater) read2() (uint32, os.Error) {
if err != nil {
return 0, err;
}
- return uint32(z.buf[0]) | uint32(z.buf[1]) << 8, nil;
+ return uint32(z.buf[0]) | uint32(z.buf[1])<<8, nil;
}
func (z *Inflater) readHeader(save bool) os.Error {
diff --git a/src/pkg/compress/gzip/gunzip_test.go b/src/pkg/compress/gzip/gunzip_test.go
index 8cc7890e4..867f61efa 100644
--- a/src/pkg/compress/gzip/gunzip_test.go
+++ b/src/pkg/compress/gzip/gunzip_test.go
@@ -12,31 +12,31 @@ import (
)
type gzipTest struct {
- name string;
- desc string;
- raw string;
- gzip []byte;
- err os.Error;
+ name string;
+ desc string;
+ raw string;
+ gzip []byte;
+ err os.Error;
}
-var gzipTests = []gzipTest {
- gzipTest { // has 1 empty fixed-huffman block
+var gzipTests = []gzipTest{
+ gzipTest{ // has 1 empty fixed-huffman block
"empty.txt",
"empty.txt",
"",
- []byte {
+ []byte{
0x1f, 0x8b, 0x08, 0x08, 0xf7, 0x5e, 0x14, 0x4a,
0x00, 0x03, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e,
0x74, 0x78, 0x74, 0x00, 0x03, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
},
- nil
+ nil,
},
- gzipTest { // has 1 non-empty fixed huffman block
+ gzipTest{ // has 1 non-empty fixed huffman block
"hello.txt",
"hello.txt",
"hello world\n",
- []byte {
+ []byte{
0x1f, 0x8b, 0x08, 0x08, 0xc8, 0x58, 0x13, 0x4a,
0x00, 0x03, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2e,
0x74, 0x78, 0x74, 0x00, 0xcb, 0x48, 0xcd, 0xc9,
@@ -44,14 +44,14 @@ var gzipTests = []gzipTest {
0x02, 0x00, 0x2d, 0x3b, 0x08, 0xaf, 0x0c, 0x00,
0x00, 0x00,
},
- nil
+ nil,
},
- gzipTest { // concatenation
+ gzipTest{ // concatenation
"hello.txt",
"hello.txt x2",
"hello world\n"
"hello world\n",
- []byte {
+ []byte{
0x1f, 0x8b, 0x08, 0x08, 0xc8, 0x58, 0x13, 0x4a,
0x00, 0x03, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2e,
0x74, 0x78, 0x74, 0x00, 0xcb, 0x48, 0xcd, 0xc9,
@@ -65,13 +65,13 @@ var gzipTests = []gzipTest {
0x02, 0x00, 0x2d, 0x3b, 0x08, 0xaf, 0x0c, 0x00,
0x00, 0x00,
},
- nil
+ nil,
},
- gzipTest { // has a fixed huffman block with some length-distance pairs
+ gzipTest{ // has a fixed huffman block with some length-distance pairs
"shesells.txt",
"shesells.txt",
"she sells seashells by the seashore\n",
- []byte {
+ []byte{
0x1f, 0x8b, 0x08, 0x08, 0x72, 0x66, 0x8b, 0x4a,
0x00, 0x03, 0x73, 0x68, 0x65, 0x73, 0x65, 0x6c,
0x6c, 0x73, 0x2e, 0x74, 0x78, 0x74, 0x00, 0x2b,
@@ -81,9 +81,9 @@ var gzipTests = []gzipTest {
0x94, 0xca, 0x05, 0x00, 0x76, 0xb0, 0x3b, 0xeb,
0x24, 0x00, 0x00, 0x00,
},
- nil
+ nil,
},
- gzipTest { // has dynamic huffman blocks
+ gzipTest{ // has dynamic huffman blocks
"gettysburg",
"gettysburg",
" Four score and seven years ago our fathers brought forth on\n"
@@ -115,7 +115,7 @@ var gzipTests = []gzipTest {
"people, for the people, shall not perish from this earth.\n"
"\n"
"Abraham Lincoln, November 19, 1863, Gettysburg, Pennsylvania\n",
- []byte {
+ []byte{
0x1f, 0x8b, 0x08, 0x08, 0xd1, 0x12, 0x2b, 0x4a,
0x00, 0x03, 0x67, 0x65, 0x74, 0x74, 0x79, 0x73,
0x62, 0x75, 0x72, 0x67, 0x00, 0x65, 0x54, 0xcd,
@@ -219,13 +219,13 @@ var gzipTests = []gzipTest {
0x4a, 0x65, 0x8f, 0x08, 0x42, 0x60, 0xf7, 0x0f,
0xb9, 0x16, 0x0b, 0x0c, 0x1a, 0x06, 0x00, 0x00,
},
- nil
+ nil,
},
- gzipTest { // has 1 non-empty fixed huffman block then garbage
+ gzipTest{ // has 1 non-empty fixed huffman block then garbage
"hello.txt",
"hello.txt + garbage",
"hello world\n",
- []byte {
+ []byte{
0x1f, 0x8b, 0x08, 0x08, 0xc8, 0x58, 0x13, 0x4a,
0x00, 0x03, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2e,
0x74, 0x78, 0x74, 0x00, 0xcb, 0x48, 0xcd, 0xc9,
@@ -235,11 +235,11 @@ var gzipTests = []gzipTest {
},
HeaderError,
},
- gzipTest { // has 1 non-empty fixed huffman block not enough header
+ gzipTest{ // has 1 non-empty fixed huffman block not enough header
"hello.txt",
"hello.txt + garbage",
"hello world\n",
- []byte {
+ []byte{
0x1f, 0x8b, 0x08, 0x08, 0xc8, 0x58, 0x13, 0x4a,
0x00, 0x03, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2e,
0x74, 0x78, 0x74, 0x00, 0xcb, 0x48, 0xcd, 0xc9,
@@ -249,11 +249,11 @@ var gzipTests = []gzipTest {
},
io.ErrUnexpectedEOF,
},
- gzipTest { // has 1 non-empty fixed huffman block but corrupt checksum
+ gzipTest{ // has 1 non-empty fixed huffman block but corrupt checksum
"hello.txt",
"hello.txt + corrupt checksum",
"hello world\n",
- []byte {
+ []byte{
0x1f, 0x8b, 0x08, 0x08, 0xc8, 0x58, 0x13, 0x4a,
0x00, 0x03, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2e,
0x74, 0x78, 0x74, 0x00, 0xcb, 0x48, 0xcd, 0xc9,
@@ -263,11 +263,11 @@ var gzipTests = []gzipTest {
},
ChecksumError,
},
- gzipTest { // has 1 non-empty fixed huffman block but corrupt size
+ gzipTest{ // has 1 non-empty fixed huffman block but corrupt size
"hello.txt",
"hello.txt + corrupt size",
"hello world\n",
- []byte {
+ []byte{
0x1f, 0x8b, 0x08, 0x08, 0xc8, 0x58, 0x13, 0x4a,
0x00, 0x03, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2e,
0x74, 0x78, 0x74, 0x00, 0xcb, 0x48, 0xcd, 0xc9,
@@ -303,4 +303,3 @@ func TestInflater(t *testing.T) {
}
}
}
-
diff --git a/src/pkg/compress/zlib/reader.go b/src/pkg/compress/zlib/reader.go
index 96dbbf992..cf0eb1b29 100644
--- a/src/pkg/compress/zlib/reader.go
+++ b/src/pkg/compress/zlib/reader.go
@@ -43,7 +43,7 @@ func NewInflater(r io.Reader) (io.ReadCloser, os.Error) {
if err != nil {
return nil, err;
}
- h := uint(z.scratch[0]) << 8 | uint(z.scratch[1]);
+ h := uint(z.scratch[0])<<8 | uint(z.scratch[1]);
if (z.scratch[0] & 0x0f != zlibDeflate) || (h%31 != 0) {
return nil, HeaderError;
}
@@ -77,7 +77,7 @@ func (z *reader) Read(p []byte) (n int, err os.Error) {
return 0, err;
}
// ZLIB (RFC 1950) is big-endian, unlike GZIP (RFC 1952).
- checksum := uint32(z.scratch[0]) << 24 | uint32(z.scratch[1]) << 16 | uint32(z.scratch[2]) << 8 | uint32(z.scratch[3]);
+ checksum := uint32(z.scratch[0])<<24 | uint32(z.scratch[1])<<16 | uint32(z.scratch[2])<<8 | uint32(z.scratch[3]);
if checksum != z.digest.Sum32() {
z.err = ChecksumError;
return 0, z.err;
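
The header check being reformatted here follows RFC 1950: the first two bytes CMF and FLG must name the deflate method in the low nibble of CMF, and CMF*256+FLG must be a multiple of 31 (the same hunk also reassembles the trailing Adler-32 checksum, which zlib stores big-endian). A minimal sketch of the header test, with illustrative names:

package main

import "fmt"

const zlibDeflate = 8

// validHeader reports whether a two-byte zlib header (CMF, FLG) names the
// deflate method and satisfies the RFC 1950 rule that CMF*256+FLG is a
// multiple of 31.
func validHeader(cmf, flg byte) bool {
	h := uint(cmf)<<8 | uint(flg)
	return cmf&0x0f == zlibDeflate && h%31 == 0
}

func main() {
	fmt.Println(validHeader(0x78, 0x9c)) // true: a common zlib header
	fmt.Println(validHeader(0x78, 0x9d)) // false: bad check bits
}
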
diff --git a/src/pkg/container/vector/vector.go b/src/pkg/container/vector/vector.go
index c29edb83f..fc7cf64ae 100644
--- a/src/pkg/container/vector/vector.go
+++ b/src/pkg/container/vector/vector.go
@@ -73,7 +73,7 @@ func (p *Vector) Init(initial_len int) *Vector {
}
}
- p.a = a[0:initial_len];
+ p.a = a[0 : initial_len];
return p;
}
@@ -108,7 +108,7 @@ func (p *Vector) Set(i int, x Element) {
// Last returns the element in the vector of highest index.
func (p *Vector) Last() Element {
- return p.a[len(p.a) - 1];
+ return p.a[len(p.a)-1];
}
@@ -146,7 +146,7 @@ func (p *Vector) Delete(i int) {
// x such that the 0th element of x appears at index i after insertion.
func (p *Vector) InsertVector(i int, x *Vector) {
p.a = expand(p.a, i, len(x.a));
- copy(p.a[i : i + len(x.a)], x.a);
+ copy(p.a[i : i+len(x.a)], x.a);
}
@@ -193,7 +193,7 @@ func (p *Vector) Push(x Element) {
// Pop deletes the last element of the vector.
func (p *Vector) Pop() Element {
- i := len(p.a) - 1;
+ i := len(p.a)-1;
x := p.a[i];
p.a[i] = nil; // support GC, nil out entry
p.a = p.a[0:i];
diff --git a/src/pkg/crypto/aes/aes_test.go b/src/pkg/crypto/aes/aes_test.go
index 6488d54ba..4bdf358f6 100644
--- a/src/pkg/crypto/aes/aes_test.go
+++ b/src/pkg/crypto/aes/aes_test.go
@@ -278,7 +278,7 @@ var encryptTests = []CryptTest{
// Test encryptBlock against FIPS 197 examples.
func TestEncryptBlock(t *testing.T) {
for i, tt := range encryptTests {
- n := len(tt.key) + 28;
+ n := len(tt.key)+28;
enc := make([]uint32, n);
dec := make([]uint32, n);
expandKey(tt.key, enc, dec);
@@ -296,7 +296,7 @@ func TestEncryptBlock(t *testing.T) {
// Test decryptBlock against FIPS 197 examples.
func TestDecryptBlock(t *testing.T) {
for i, tt := range encryptTests {
- n := len(tt.key) + 28;
+ n := len(tt.key)+28;
enc := make([]uint32, n);
dec := make([]uint32, n);
expandKey(tt.key, enc, dec);
diff --git a/src/pkg/crypto/aes/block.go b/src/pkg/crypto/aes/block.go
index fb4efc191..738deba66 100644
--- a/src/pkg/crypto/aes/block.go
+++ b/src/pkg/crypto/aes/block.go
@@ -56,19 +56,19 @@ func encryptBlock(xk []uint32, src, dst []byte) {
nr := len(xk)/4 - 2; // - 2: one above, one more below
k := 4;
for r := 0; r < nr; r++ {
- t0 = xk[k+0] ^ te[0][s0>>24] ^ te[1][s1>>16 & 0xff] ^ te[2][s2>>8 & 0xff] ^ te[3][s3 & 0xff];
- t1 = xk[k+1] ^ te[0][s1>>24] ^ te[1][s2>>16 & 0xff] ^ te[2][s3>>8 & 0xff] ^ te[3][s0 & 0xff];
- t2 = xk[k+2] ^ te[0][s2>>24] ^ te[1][s3>>16 & 0xff] ^ te[2][s0>>8 & 0xff] ^ te[3][s1 & 0xff];
- t3 = xk[k+3] ^ te[0][s3>>24] ^ te[1][s0>>16 & 0xff] ^ te[2][s1>>8 & 0xff] ^ te[3][s2 & 0xff];
+ t0 = xk[k+0]^te[0][s0>>24]^te[1][s1>>16&0xff]^te[2][s2>>8&0xff]^te[3][s3&0xff];
+ t1 = xk[k+1]^te[0][s1>>24]^te[1][s2>>16&0xff]^te[2][s3>>8&0xff]^te[3][s0&0xff];
+ t2 = xk[k+2]^te[0][s2>>24]^te[1][s3>>16&0xff]^te[2][s0>>8&0xff]^te[3][s1&0xff];
+ t3 = xk[k+3]^te[0][s3>>24]^te[1][s0>>16&0xff]^te[2][s1>>8&0xff]^te[3][s2&0xff];
k += 4;
s0, s1, s2, s3 = t0, t1, t2, t3;
}
// Last round uses s-box directly and XORs to produce output.
- s0 = uint32(sbox0[t0>>24])<<24 | uint32(sbox0[t1>>16 & 0xff])<<16 | uint32(sbox0[t2>>8 & 0xff])<<8 | uint32(sbox0[t3 & 0xff]);
- s1 = uint32(sbox0[t1>>24])<<24 | uint32(sbox0[t2>>16 & 0xff])<<16 | uint32(sbox0[t3>>8 & 0xff])<<8 | uint32(sbox0[t0 & 0xff]);
- s2 = uint32(sbox0[t2>>24])<<24 | uint32(sbox0[t3>>16 & 0xff])<<16 | uint32(sbox0[t0>>8 & 0xff])<<8 | uint32(sbox0[t1 & 0xff]);
- s3 = uint32(sbox0[t3>>24])<<24 | uint32(sbox0[t0>>16 & 0xff])<<16 | uint32(sbox0[t1>>8 & 0xff])<<8 | uint32(sbox0[t2 & 0xff]);
+ s0 = uint32(sbox0[t0>>24])<<24 | uint32(sbox0[t1>>16&0xff])<<16 | uint32(sbox0[t2>>8&0xff])<<8 | uint32(sbox0[t3&0xff]);
+ s1 = uint32(sbox0[t1>>24])<<24 | uint32(sbox0[t2>>16&0xff])<<16 | uint32(sbox0[t3>>8&0xff])<<8 | uint32(sbox0[t0&0xff]);
+ s2 = uint32(sbox0[t2>>24])<<24 | uint32(sbox0[t3>>16&0xff])<<16 | uint32(sbox0[t0>>8&0xff])<<8 | uint32(sbox0[t1&0xff]);
+ s3 = uint32(sbox0[t3>>24])<<24 | uint32(sbox0[t0>>16&0xff])<<16 | uint32(sbox0[t1>>8&0xff])<<8 | uint32(sbox0[t2&0xff]);
s0 ^= xk[k+0];
s1 ^= xk[k+1];
@@ -101,19 +101,19 @@ func decryptBlock(xk []uint32, src, dst []byte) {
nr := len(xk)/4 - 2; // - 2: one above, one more below
k := 4;
for r := 0; r < nr; r++ {
- t0 = xk[k+0] ^ td[0][s0>>24] ^ td[1][s3>>16 & 0xff] ^ td[2][s2>>8 & 0xff] ^ td[3][s1 & 0xff];
- t1 = xk[k+1] ^ td[0][s1>>24] ^ td[1][s0>>16 & 0xff] ^ td[2][s3>>8 & 0xff] ^ td[3][s2 & 0xff];
- t2 = xk[k+2] ^ td[0][s2>>24] ^ td[1][s1>>16 & 0xff] ^ td[2][s0>>8 & 0xff] ^ td[3][s3 & 0xff];
- t3 = xk[k+3] ^ td[0][s3>>24] ^ td[1][s2>>16 & 0xff] ^ td[2][s1>>8 & 0xff] ^ td[3][s0 & 0xff];
+ t0 = xk[k+0]^td[0][s0>>24]^td[1][s3>>16&0xff]^td[2][s2>>8&0xff]^td[3][s1&0xff];
+ t1 = xk[k+1]^td[0][s1>>24]^td[1][s0>>16&0xff]^td[2][s3>>8&0xff]^td[3][s2&0xff];
+ t2 = xk[k+2]^td[0][s2>>24]^td[1][s1>>16&0xff]^td[2][s0>>8&0xff]^td[3][s3&0xff];
+ t3 = xk[k+3]^td[0][s3>>24]^td[1][s2>>16&0xff]^td[2][s1>>8&0xff]^td[3][s0&0xff];
k += 4;
s0, s1, s2, s3 = t0, t1, t2, t3;
}
// Last round uses s-box directly and XORs to produce output.
- s0 = uint32(sbox1[t0>>24])<<24 | uint32(sbox1[t3>>16 & 0xff])<<16 | uint32(sbox1[t2>>8 & 0xff])<<8 | uint32(sbox1[t1 & 0xff]);
- s1 = uint32(sbox1[t1>>24])<<24 | uint32(sbox1[t0>>16 & 0xff])<<16 | uint32(sbox1[t3>>8 & 0xff])<<8 | uint32(sbox1[t2 & 0xff]);
- s2 = uint32(sbox1[t2>>24])<<24 | uint32(sbox1[t1>>16 & 0xff])<<16 | uint32(sbox1[t0>>8 & 0xff])<<8 | uint32(sbox1[t3 & 0xff]);
- s3 = uint32(sbox1[t3>>24])<<24 | uint32(sbox1[t2>>16 & 0xff])<<16 | uint32(sbox1[t1>>8 & 0xff])<<8 | uint32(sbox1[t0 & 0xff]);
+ s0 = uint32(sbox1[t0>>24])<<24 | uint32(sbox1[t3>>16&0xff])<<16 | uint32(sbox1[t2>>8&0xff])<<8 | uint32(sbox1[t1&0xff]);
+ s1 = uint32(sbox1[t1>>24])<<24 | uint32(sbox1[t0>>16&0xff])<<16 | uint32(sbox1[t3>>8&0xff])<<8 | uint32(sbox1[t2&0xff]);
+ s2 = uint32(sbox1[t2>>24])<<24 | uint32(sbox1[t1>>16&0xff])<<16 | uint32(sbox1[t0>>8&0xff])<<8 | uint32(sbox1[t3&0xff]);
+ s3 = uint32(sbox1[t3>>24])<<24 | uint32(sbox1[t2>>16&0xff])<<16 | uint32(sbox1[t1>>8&0xff])<<8 | uint32(sbox1[t0&0xff]);
s0 ^= xk[k+0];
s1 ^= xk[k+1];
@@ -128,11 +128,10 @@ func decryptBlock(xk []uint32, src, dst []byte) {
// Apply sbox0 to each byte in w.
func subw(w uint32) uint32 {
- return
- uint32(sbox0[w>>24])<<24 |
- uint32(sbox0[w>>16 & 0xff])<<16 |
- uint32(sbox0[w>>8 & 0xff])<<8 |
- uint32(sbox0[w & 0xff]);
+ return uint32(sbox0[w>>24])<<24 |
+ uint32(sbox0[w>>16&0xff])<<16 |
+ uint32(sbox0[w>>8&0xff])<<8 |
+ uint32(sbox0[w&0xff]);
}
// Rotate
@@ -145,18 +144,18 @@ func rotw(w uint32) uint32 {
func expandKey(key []byte, enc, dec []uint32) {
// Encryption key setup.
var i int;
- nk := len(key) / 4;
+ nk := len(key)/4;
for i = 0; i < nk; i++ {
- enc[i] = uint32(key[4*i])<<24 | uint32(key[4*i+1])<<16 | uint32(key[4*i+2])<<8 | uint32(key[4*i+3]);
+ enc[i] = uint32(key[4*i])<<24 | uint32(key[4*i + 1])<<16 | uint32(key[4*i + 2])<<8 | uint32(key[4*i + 3]);
}
for ; i < len(enc); i++ {
t := enc[i-1];
- if i % nk == 0 {
- t = subw(rotw(t)) ^ (uint32(powx[i/nk - 1]) << 24);
- } else if nk > 6 && i % nk == 4 {
+ if i%nk == 0 {
+ t = subw(rotw(t))^(uint32(powx[i/nk - 1])<<24);
+ } else if nk > 6 && i%nk == 4 {
t = subw(t);
}
- enc[i] = enc[i-nk] ^ t;
+ enc[i] = enc[i-nk]^t;
}
// Derive decryption key from encryption key.
@@ -167,14 +166,13 @@ func expandKey(key []byte, enc, dec []uint32) {
}
n := len(enc);
for i := 0; i < n; i += 4 {
- ei := n - i - 4;
+ ei := n-i-4;
for j := 0; j < 4; j++ {
x := enc[ei+j];
if i > 0 && i+4 < n {
- x = td[0][sbox0[x>>24]] ^ td[1][sbox0[x>>16 & 0xff]] ^ td[2][sbox0[x>>8 & 0xff]] ^ td[3][sbox0[x & 0xff]];
+ x = td[0][sbox0[x>>24]]^td[1][sbox0[x>>16&0xff]]^td[2][sbox0[x>>8&0xff]]^td[3][sbox0[x&0xff]];
}
dec[i+j] = x;
}
}
}
-
diff --git a/src/pkg/crypto/block/ctr.go b/src/pkg/crypto/block/ctr.go
index 235e3b974..1e238c43c 100644
--- a/src/pkg/crypto/block/ctr.go
+++ b/src/pkg/crypto/block/ctr.go
@@ -35,7 +35,7 @@ func (x *ctrStream) Next() []byte {
x.c.Encrypt(x.ctr, x.out);
// Increment counter
- for i := len(x.ctr) - 1; i >= 0; i-- {
+ for i := len(x.ctr)-1; i >= 0; i-- {
x.ctr[i]++;
if x.ctr[i] != 0 {
break;
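
The counter update reformatted above is a big-endian increment: bump the last byte and, whenever a byte wraps around to zero, carry into the next more-significant one. A standalone sketch:

package main

import "fmt"

// inc increments a big-endian counter in place, carrying into the next
// more-significant byte whenever a byte wraps to zero -- the same loop
// the CTR stream runs on its counter block.
func inc(ctr []byte) {
	for i := len(ctr) - 1; i >= 0; i-- {
		ctr[i]++
		if ctr[i] != 0 {
			break
		}
	}
}

func main() {
	c := []byte{0x00, 0x00, 0xff, 0xff}
	inc(c)
	fmt.Printf("% x\n", c) // 00 01 00 00
}
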
diff --git a/src/pkg/crypto/block/ctr_aes_test.go b/src/pkg/crypto/block/ctr_aes_test.go
index f34fadc74..c075f96f6 100644
--- a/src/pkg/crypto/block/ctr_aes_test.go
+++ b/src/pkg/crypto/block/ctr_aes_test.go
@@ -79,7 +79,7 @@ func TestCTR_AES(t *testing.T) {
for j := 0; j <= 5; j += 5 {
var crypt bytes.Buffer;
- in := tt.in[0 : len(tt.in) - j];
+ in := tt.in[0 : len(tt.in)-j];
w := NewCTRWriter(c, tt.iv, &crypt);
var r io.Reader = bytes.NewBuffer(in);
n, err := io.Copy(r, w);
@@ -92,7 +92,7 @@ func TestCTR_AES(t *testing.T) {
for j := 0; j <= 7; j += 7 {
var plain bytes.Buffer;
- out := tt.out[0 : len(tt.out) - j];
+ out := tt.out[0 : len(tt.out)-j];
r := NewCTRReader(c, tt.iv, bytes.NewBuffer(out));
w := &plain;
n, err := io.Copy(r, w);
diff --git a/src/pkg/crypto/block/ecb.go b/src/pkg/crypto/block/ecb.go
index af8f26cc6..f2455ff0a 100644
--- a/src/pkg/crypto/block/ecb.go
+++ b/src/pkg/crypto/block/ecb.go
@@ -66,7 +66,7 @@ func (x *ecbDecrypter) readPlain(p []byte) int {
p[i] = x.plain[i];
}
if n < len(x.plain) {
- x.plain = x.plain[n : len(x.plain)];
+ x.plain = x.plain[n:len(x.plain)];
} else {
x.plain = nil;
}
@@ -172,7 +172,7 @@ func (x *ecbEncrypter) flushCrypt() os.Error {
}
n, err := x.w.Write(x.crypt);
if n < len(x.crypt) {
- x.crypt = x.crypt[n : len(x.crypt)];
+ x.crypt = x.crypt[n:len(x.crypt)];
if err == nil {
err = io.ErrShortWrite;
}
@@ -196,7 +196,7 @@ func (x *ecbEncrypter) slidePlain() {
for i := 0; i < len(x.plain); i++ {
x.buf[i] = x.plain[i];
}
- x.plain = x.buf[0 : len(x.plain)];
+ x.plain = x.buf[0:len(x.plain)];
}
}
@@ -205,7 +205,7 @@ func (x *ecbEncrypter) slidePlain() {
func (x *ecbEncrypter) fillPlain(p []byte) int {
off := len(x.plain);
n := len(p);
- if max := cap(x.plain) - off; n > max {
+ if max := cap(x.plain)-off; n > max {
n = max;
}
x.plain = x.plain[0 : off+n];
diff --git a/src/pkg/crypto/block/ofb_aes_test.go b/src/pkg/crypto/block/ofb_aes_test.go
index e54a9ce67..31622c364 100644
--- a/src/pkg/crypto/block/ofb_aes_test.go
+++ b/src/pkg/crypto/block/ofb_aes_test.go
@@ -77,7 +77,7 @@ func TestOFB_AES(t *testing.T) {
for j := 0; j <= 5; j += 5 {
var crypt bytes.Buffer;
- in := tt.in[0 : len(tt.in) - j];
+ in := tt.in[0 : len(tt.in)-j];
w := NewOFBWriter(c, tt.iv, &crypt);
var r io.Reader = bytes.NewBuffer(in);
n, err := io.Copy(r, w);
@@ -90,7 +90,7 @@ func TestOFB_AES(t *testing.T) {
for j := 0; j <= 7; j += 7 {
var plain bytes.Buffer;
- out := tt.out[0 : len(tt.out) - j];
+ out := tt.out[0 : len(tt.out)-j];
r := NewOFBReader(c, tt.iv, bytes.NewBuffer(out));
w := &plain;
n, err := io.Copy(r, w);
diff --git a/src/pkg/datafmt/parser.go b/src/pkg/datafmt/parser.go
index d6382de40..9a90c22f4 100644
--- a/src/pkg/datafmt/parser.go
+++ b/src/pkg/datafmt/parser.go
@@ -55,7 +55,7 @@ func (p *parser) errorExpected(pos token.Position, msg string) {
// make the error message more specific
msg += ", found '" + p.tok.String() + "'";
if p.tok.IsLiteral() {
- msg += " " + string(p.lit);
+ msg += " "+string(p.lit);
}
}
p.Error(pos, msg);
@@ -347,7 +347,7 @@ func remap(p *parser, name string) string {
packageName, suffix := name[0:i], name[i:len(name)];
// lookup package
if importPath, found := p.packs[packageName]; found {
- name = importPath+suffix;
+ name = importPath + suffix;
} else {
var invalidPos token.Position;
p.Error(invalidPos, "package not declared: " + packageName);
diff --git a/src/pkg/debug/binary/binary.go b/src/pkg/debug/binary/binary.go
index 78e9baa17..836a43df0 100644
--- a/src/pkg/debug/binary/binary.go
+++ b/src/pkg/debug/binary/binary.go
@@ -141,8 +141,8 @@ func sizeof(t reflect.Type) int {
}
type decoder struct {
- order ByteOrder;
- buf []byte;
+ order ByteOrder;
+ buf []byte;
}
func (d *decoder) uint8() uint8 {
diff --git a/src/pkg/debug/dwarf/buf.go b/src/pkg/debug/dwarf/buf.go
index e4cb28e5d..2d8211090 100644
--- a/src/pkg/debug/dwarf/buf.go
+++ b/src/pkg/debug/dwarf/buf.go
@@ -33,7 +33,7 @@ func (b *buf) uint8() uint8 {
return 0;
}
val := b.data[0];
- b.data = b.data[1 : len(b.data)];
+ b.data = b.data[1:len(b.data)];
b.off++;
return val;
}
@@ -44,7 +44,7 @@ func (b *buf) bytes(n int) []byte {
return nil;
}
data := b.data[0:n];
- b.data = b.data[n : len(b.data)];
+ b.data = b.data[n:len(b.data)];
b.off += Offset(n);
return data;
}
@@ -95,7 +95,7 @@ func (b *buf) uint64() uint64 {
func (b *buf) varint() (c uint64, bits uint) {
for i := 0; i < len(b.data); i++ {
byte := b.data[i];
- c |= uint64(byte&0x7F) << bits;
+ c |= uint64(byte&0x7F)<<bits;
bits += 7;
if byte&0x80 == 0 {
b.off += Offset(i+1);
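
varint, touched above, decodes an unsigned LEB128 value: seven payload bits per byte, least-significant group first, with the high bit of each byte marking continuation. A self-contained sketch of the same loop (the name uvarint here is illustrative, not the package's):

package main

import "fmt"

// uvarint decodes an unsigned LEB128 value from data and reports how many
// bytes it consumed; it returns 0, 0 if the value is truncated.
func uvarint(data []byte) (v uint64, n int) {
	var shift uint
	for i, b := range data {
		v |= uint64(b&0x7f) << shift
		shift += 7
		if b&0x80 == 0 {
			return v, i + 1
		}
	}
	return 0, 0
}

func main() {
	v, n := uvarint([]byte{0xe5, 0x8e, 0x26})
	fmt.Println(v, n) // 624485 3
}
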
diff --git a/src/pkg/debug/dwarf/entry.go b/src/pkg/debug/dwarf/entry.go
index 0554a249b..97e58af40 100644
--- a/src/pkg/debug/dwarf/entry.go
+++ b/src/pkg/debug/dwarf/entry.go
@@ -190,15 +190,15 @@ func (b *buf) entry(atab abbrevTable, ubase Offset) *Entry {
case formRefAddr:
val = Offset(b.addr());
case formRef1:
- val = Offset(b.uint8()) + ubase;
+ val = Offset(b.uint8())+ubase;
case formRef2:
- val = Offset(b.uint16()) + ubase;
+ val = Offset(b.uint16())+ubase;
case formRef4:
- val = Offset(b.uint32()) + ubase;
+ val = Offset(b.uint32())+ubase;
case formRef8:
- val = Offset(b.uint64()) + ubase;
+ val = Offset(b.uint64())+ubase;
case formRefUdata:
- val = Offset(b.uint()) + ubase;
+ val = Offset(b.uint())+ubase;
// string
case formString:
diff --git a/src/pkg/debug/elf/elf_test.go b/src/pkg/debug/elf/elf_test.go
index c6cf7bfe4..c48e6d959 100644
--- a/src/pkg/debug/elf/elf_test.go
+++ b/src/pkg/debug/elf/elf_test.go
@@ -20,7 +20,7 @@ var nameTests = []nameTest{
nameTest{EM_860, "EM_860"},
nameTest{SHN_LOPROC, "SHN_LOPROC"},
nameTest{SHT_PROGBITS, "SHT_PROGBITS"},
- nameTest{SHF_MERGE+SHF_TLS, "SHF_MERGE+SHF_TLS"},
+ nameTest{SHF_MERGE + SHF_TLS, "SHF_MERGE+SHF_TLS"},
nameTest{PT_LOAD, "PT_LOAD"},
nameTest{PF_W+PF_R+0x50, "PF_W+PF_R+0x50"},
nameTest{DT_SYMBOLIC, "DT_SYMBOLIC"},
diff --git a/src/pkg/debug/elf/file.go b/src/pkg/debug/elf/file.go
index f2d665797..0c5d6f317 100644
--- a/src/pkg/debug/elf/file.go
+++ b/src/pkg/debug/elf/file.go
@@ -298,7 +298,7 @@ func NewFile(r io.ReaderAt) (*File, os.Error) {
var ok bool;
s.Name, ok = getString(shstrtab, int(names[i]));
if !ok {
- return nil, &FormatError{shoff + int64(i*shentsize), "bad section name index", names[i]};
+ return nil, &FormatError{shoff+int64(i * shentsize), "bad section name index", names[i]};
}
}
diff --git a/src/pkg/debug/elf/file_test.go b/src/pkg/debug/elf/file_test.go
index 140841b6d..01e638eea 100644
--- a/src/pkg/debug/elf/file_test.go
+++ b/src/pkg/debug/elf/file_test.go
@@ -32,14 +32,14 @@ var fileTests = []fileTest{
SectionHeader{".text", SHT_PROGBITS, SHF_ALLOC + SHF_EXECINSTR, 0x80483cc, 0x3cc, 0x180, 0x0, 0x0, 0x4, 0x0},
SectionHeader{".fini", SHT_PROGBITS, SHF_ALLOC + SHF_EXECINSTR, 0x804854c, 0x54c, 0xc, 0x0, 0x0, 0x4, 0x0},
SectionHeader{".rodata", SHT_PROGBITS, SHF_ALLOC, 0x8048558, 0x558, 0xa3, 0x0, 0x0, 0x1, 0x0},
- SectionHeader{".data", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x80495fc, 0x5fc, 0xc, 0x0, 0x0, 0x4, 0x0},
+ SectionHeader{".data", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x80495fc, 0x5fc, 0xc, 0x0, 0x0, 0x4, 0x0},
SectionHeader{".eh_frame", SHT_PROGBITS, SHF_ALLOC, 0x8049608, 0x608, 0x4, 0x0, 0x0, 0x4, 0x0},
- SectionHeader{".dynamic", SHT_DYNAMIC, SHF_WRITE+SHF_ALLOC, 0x804960c, 0x60c, 0x98, 0x4, 0x0, 0x4, 0x8},
- SectionHeader{".ctors", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x80496a4, 0x6a4, 0x8, 0x0, 0x0, 0x4, 0x0},
- SectionHeader{".dtors", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x80496ac, 0x6ac, 0x8, 0x0, 0x0, 0x4, 0x0},
- SectionHeader{".jcr", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x80496b4, 0x6b4, 0x4, 0x0, 0x0, 0x4, 0x0},
- SectionHeader{".got", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x80496b8, 0x6b8, 0x1c, 0x0, 0x0, 0x4, 0x4},
- SectionHeader{".bss", SHT_NOBITS, SHF_WRITE+SHF_ALLOC, 0x80496d4, 0x6d4, 0x20, 0x0, 0x0, 0x4, 0x0},
+ SectionHeader{".dynamic", SHT_DYNAMIC, SHF_WRITE + SHF_ALLOC, 0x804960c, 0x60c, 0x98, 0x4, 0x0, 0x4, 0x8},
+ SectionHeader{".ctors", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x80496a4, 0x6a4, 0x8, 0x0, 0x0, 0x4, 0x0},
+ SectionHeader{".dtors", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x80496ac, 0x6ac, 0x8, 0x0, 0x0, 0x4, 0x0},
+ SectionHeader{".jcr", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x80496b4, 0x6b4, 0x4, 0x0, 0x0, 0x4, 0x0},
+ SectionHeader{".got", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x80496b8, 0x6b8, 0x1c, 0x0, 0x0, 0x4, 0x4},
+ SectionHeader{".bss", SHT_NOBITS, SHF_WRITE + SHF_ALLOC, 0x80496d4, 0x6d4, 0x20, 0x0, 0x0, 0x4, 0x0},
SectionHeader{".comment", SHT_PROGBITS, 0x0, 0x0, 0x6d4, 0x12d, 0x0, 0x0, 0x1, 0x0},
SectionHeader{".debug_aranges", SHT_PROGBITS, 0x0, 0x0, 0x801, 0x20, 0x0, 0x0, 0x1, 0x0},
SectionHeader{".debug_pubnames", SHT_PROGBITS, 0x0, 0x0, 0x821, 0x1b, 0x0, 0x0, 0x1, 0x0},
@@ -75,21 +75,21 @@ var fileTests = []fileTest{
SectionHeader{".rodata", SHT_PROGBITS, SHF_ALLOC, 0x4005a4, 0x5a4, 0x11, 0x0, 0x0, 0x4, 0x0},
SectionHeader{".eh_frame_hdr", SHT_PROGBITS, SHF_ALLOC, 0x4005b8, 0x5b8, 0x24, 0x0, 0x0, 0x4, 0x0},
SectionHeader{".eh_frame", SHT_PROGBITS, SHF_ALLOC, 0x4005e0, 0x5e0, 0xa4, 0x0, 0x0, 0x8, 0x0},
- SectionHeader{".ctors", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x600688, 0x688, 0x10, 0x0, 0x0, 0x8, 0x0},
- SectionHeader{".dtors", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x600698, 0x698, 0x10, 0x0, 0x0, 0x8, 0x0},
- SectionHeader{".jcr", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x6006a8, 0x6a8, 0x8, 0x0, 0x0, 0x8, 0x0},
- SectionHeader{".dynamic", SHT_DYNAMIC, SHF_WRITE+SHF_ALLOC, 0x6006b0, 0x6b0, 0x1a0, 0x6, 0x0, 0x8, 0x10},
- SectionHeader{".got", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x600850, 0x850, 0x8, 0x0, 0x0, 0x8, 0x8},
- SectionHeader{".got.plt", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x600858, 0x858, 0x28, 0x0, 0x0, 0x8, 0x8},
- SectionHeader{".data", SHT_PROGBITS, SHF_WRITE+SHF_ALLOC, 0x600880, 0x880, 0x18, 0x0, 0x0, 0x8, 0x0},
- SectionHeader{".bss", SHT_NOBITS, SHF_WRITE+SHF_ALLOC, 0x600898, 0x898, 0x8, 0x0, 0x0, 0x4, 0x0},
+ SectionHeader{".ctors", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x600688, 0x688, 0x10, 0x0, 0x0, 0x8, 0x0},
+ SectionHeader{".dtors", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x600698, 0x698, 0x10, 0x0, 0x0, 0x8, 0x0},
+ SectionHeader{".jcr", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x6006a8, 0x6a8, 0x8, 0x0, 0x0, 0x8, 0x0},
+ SectionHeader{".dynamic", SHT_DYNAMIC, SHF_WRITE + SHF_ALLOC, 0x6006b0, 0x6b0, 0x1a0, 0x6, 0x0, 0x8, 0x10},
+ SectionHeader{".got", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x600850, 0x850, 0x8, 0x0, 0x0, 0x8, 0x8},
+ SectionHeader{".got.plt", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x600858, 0x858, 0x28, 0x0, 0x0, 0x8, 0x8},
+ SectionHeader{".data", SHT_PROGBITS, SHF_WRITE + SHF_ALLOC, 0x600880, 0x880, 0x18, 0x0, 0x0, 0x8, 0x0},
+ SectionHeader{".bss", SHT_NOBITS, SHF_WRITE + SHF_ALLOC, 0x600898, 0x898, 0x8, 0x0, 0x0, 0x4, 0x0},
SectionHeader{".comment", SHT_PROGBITS, 0x0, 0x0, 0x898, 0x126, 0x0, 0x0, 0x1, 0x0},
SectionHeader{".debug_aranges", SHT_PROGBITS, 0x0, 0x0, 0x9c0, 0x90, 0x0, 0x0, 0x10, 0x0},
SectionHeader{".debug_pubnames", SHT_PROGBITS, 0x0, 0x0, 0xa50, 0x25, 0x0, 0x0, 0x1, 0x0},
SectionHeader{".debug_info", SHT_PROGBITS, 0x0, 0x0, 0xa75, 0x1a7, 0x0, 0x0, 0x1, 0x0},
SectionHeader{".debug_abbrev", SHT_PROGBITS, 0x0, 0x0, 0xc1c, 0x6f, 0x0, 0x0, 0x1, 0x0},
SectionHeader{".debug_line", SHT_PROGBITS, 0x0, 0x0, 0xc8b, 0x13f, 0x0, 0x0, 0x1, 0x0},
- SectionHeader{".debug_str", SHT_PROGBITS, SHF_MERGE+SHF_STRINGS, 0x0, 0xdca, 0xb1, 0x0, 0x0, 0x1, 0x1},
+ SectionHeader{".debug_str", SHT_PROGBITS, SHF_MERGE + SHF_STRINGS, 0x0, 0xdca, 0xb1, 0x0, 0x0, 0x1, 0x1},
SectionHeader{".debug_ranges", SHT_PROGBITS, 0x0, 0x0, 0xe80, 0x90, 0x0, 0x0, 0x10, 0x0},
SectionHeader{".shstrtab", SHT_STRTAB, 0x0, 0x0, 0xf10, 0x149, 0x0, 0x0, 0x1, 0x0},
SectionHeader{".symtab", SHT_SYMTAB, 0x0, 0x0, 0x19a0, 0x6f0, 0x24, 0x39, 0x8, 0x18},
diff --git a/src/pkg/debug/gosym/pclntab.go b/src/pkg/debug/gosym/pclntab.go
index 458b5243d..24c368616 100644
--- a/src/pkg/debug/gosym/pclntab.go
+++ b/src/pkg/debug/gosym/pclntab.go
@@ -46,7 +46,7 @@ func (t *LineTable) parse(targetPC uint64, targetLine int) (b []byte, pc uint64,
case code <= 128:
line -= int(code-64);
default:
- pc += quantum * uint64(code-128);
+ pc += quantum*uint64(code-128);
continue;
}
pc += quantum;
diff --git a/src/pkg/debug/proc/proc_linux.go b/src/pkg/debug/proc/proc_linux.go
index 82291be23..d2c8b8af2 100644
--- a/src/pkg/debug/proc/proc_linux.go
+++ b/src/pkg/debug/proc/proc_linux.go
@@ -515,7 +515,7 @@ func (ev *debugEvent) doTrap() (threadState, os.Error) {
return stopped, err;
}
- b, ok := t.proc.breakpoints[uintptr(regs.PC()) - uintptr(len(bpinst386))];
+ b, ok := t.proc.breakpoints[uintptr(regs.PC())-uintptr(len(bpinst386))];
if !ok {
// We must have hit a breakpoint that was actually in
// the program. Leave the IP where it is so we don't
@@ -1218,7 +1218,7 @@ func (p *process) attachAllThreads() os.Error {
if err != nil {
// There could have been a race, or
// this process could be a zombie.
- statFile, err2 := io.ReadFile(taskPath + "/" + tidStr + "/stat");
+ statFile, err2 := io.ReadFile(taskPath+"/"+tidStr+"/stat");
if err2 != nil {
switch err2 := err2.(type) {
case *os.PathError:
diff --git a/src/pkg/ebnf/parser.go b/src/pkg/ebnf/parser.go
index a3fbe6f60..325673caf 100644
--- a/src/pkg/ebnf/parser.go
+++ b/src/pkg/ebnf/parser.go
@@ -15,10 +15,10 @@ import (
type parser struct {
scanner.ErrorVector;
- scanner scanner.Scanner;
- pos token.Position; // token position
- tok token.Token; // one token look-ahead
- lit []byte; // token literal
+ scanner scanner.Scanner;
+ pos token.Position; // token position
+ tok token.Token; // one token look-ahead
+ lit []byte; // token literal
}
@@ -39,7 +39,7 @@ func (p *parser) errorExpected(pos token.Position, msg string) {
// make the error message more specific
msg += ", found '" + p.tok.String() + "'";
if p.tok.IsLiteral() {
- msg += " " + string(p.lit);
+ msg += " "+string(p.lit);
}
}
p.Error(pos, msg);
@@ -51,7 +51,7 @@ func (p *parser) expect(tok token.Token) token.Position {
if p.tok != tok {
p.errorExpected(pos, "'" + tok.String() + "'");
}
- p.next(); // make progress in any case
+ p.next(); // make progress in any case
return pos;
}
@@ -185,7 +185,7 @@ func (p *parser) parse(filename string, src []byte) Grammar {
// initialize parser
p.ErrorVector.Init();
p.scanner.Init(filename, src, p, 0);
- p.next(); // initializes pos, tok, lit
+ p.next(); // initializes pos, tok, lit
grammar := make(Grammar);
for p.tok != token.EOF {
diff --git a/src/pkg/flag/flag.go b/src/pkg/flag/flag.go
index 067e45d95..02f8187d6 100644
--- a/src/pkg/flag/flag.go
+++ b/src/pkg/flag/flag.go
@@ -396,7 +396,7 @@ func (f *allFlags) parseOne(index int) (ok bool, next int) {
return false, index+1;
}
}
- name := s[num_minuses:len(s)];
+ name := s[num_minuses : len(s)];
if len(name) == 0 || name[0] == '-' || name[0] == '=' {
fmt.Fprintln(os.Stderr, "bad flag syntax:", s);
Usage();
@@ -439,7 +439,7 @@ func (f *allFlags) parseOne(index int) (ok bool, next int) {
}
} else {
// It must have a value, which might be the next argument.
- if !has_value && index < len(os.Args) - 1 {
+ if !has_value && index < len(os.Args)-1 {
// value is the next arg
has_value = true;
index++;
diff --git a/src/pkg/fmt/print.go b/src/pkg/fmt/print.go
index 64f6f6ad4..cfd2849b8 100644
--- a/src/pkg/fmt/print.go
+++ b/src/pkg/fmt/print.go
@@ -164,7 +164,7 @@ func (p *pp) ensure(n int) {
if len(p.buf) < n {
newn := allocSize + len(p.buf);
if newn < n {
- newn = n+allocSize;
+ newn = n + allocSize;
}
b := make([]byte, newn);
for i := 0; i < p.n; i++ {
diff --git a/src/pkg/go/ast/filter.go b/src/pkg/go/ast/filter.go
index 9d0679a5a..ee6747f65 100644
--- a/src/pkg/go/ast/filter.go
+++ b/src/pkg/go/ast/filter.go
@@ -203,7 +203,7 @@ func MergePackageFiles(pkg *Package) *File {
ndecls := 0;
for _, f := range pkg.Files {
if f.Doc != nil {
- ncomments += len(f.Doc.List) + 1; // +1 for separator
+ ncomments += len(f.Doc.List)+1; // +1 for separator
}
ndecls += len(f.Decls);
}
@@ -215,7 +215,7 @@ func MergePackageFiles(pkg *Package) *File {
// than drop them on the floor.
var doc *CommentGroup;
if ncomments > 0 {
- list := make([]*Comment, ncomments-1); // -1: no separator before first group
+ list := make([]*Comment, ncomments - 1); // -1: no separator before first group
i := 0;
for _, f := range pkg.Files {
if f.Doc != nil {
diff --git a/src/pkg/go/doc/doc.go b/src/pkg/go/doc/doc.go
index 9b6b34d6b..130533ebd 100644
--- a/src/pkg/go/doc/doc.go
+++ b/src/pkg/go/doc/doc.go
@@ -18,11 +18,11 @@ import (
type typeDoc struct {
// len(decl.Specs) == 1, and the element type is *ast.TypeSpec
// if the type declaration hasn't been seen yet, decl is nil
- decl *ast.GenDecl;
+ decl *ast.GenDecl;
// values, factory functions, and methods associated with the type
- values *vector.Vector; // list of *ast.GenDecl (consts and vars)
- factories map[string] *ast.FuncDecl;
- methods map[string] *ast.FuncDecl;
+ values *vector.Vector; // list of *ast.GenDecl (consts and vars)
+ factories map[string]*ast.FuncDecl;
+ methods map[string]*ast.FuncDecl;
}
@@ -34,18 +34,18 @@ type typeDoc struct {
// printing the corresponding AST node).
//
type docReader struct {
- doc *ast.CommentGroup; // package documentation, if any
- values *vector.Vector; // list of *ast.GenDecl (consts and vars)
- types map[string] *typeDoc;
- funcs map[string] *ast.FuncDecl;
- bugs *vector.Vector; // list of *ast.CommentGroup
+ doc *ast.CommentGroup; // package documentation, if any
+ values *vector.Vector; // list of *ast.GenDecl (consts and vars)
+ types map[string]*typeDoc;
+ funcs map[string]*ast.FuncDecl;
+ bugs *vector.Vector; // list of *ast.CommentGroup
}
func (doc *docReader) init() {
doc.values = vector.New(0);
- doc.types = make(map[string] *typeDoc);
- doc.funcs = make(map[string] *ast.FuncDecl);
+ doc.types = make(map[string]*typeDoc);
+ doc.funcs = make(map[string]*ast.FuncDecl);
doc.bugs = vector.New(0);
}
@@ -65,13 +65,13 @@ func (doc *docReader) addType(decl *ast.GenDecl) {
func (doc *docReader) lookupTypeDoc(name string) *typeDoc {
if name == "" {
- return nil; // no type docs for anonymous types
+ return nil; // no type docs for anonymous types
}
if tdoc, found := doc.types[name]; found {
return tdoc;
}
// type wasn't found - add one without declaration
- tdoc := &typeDoc{nil, vector.New(0), make(map[string] *ast.FuncDecl), make(map[string] *ast.FuncDecl)};
+ tdoc := &typeDoc{nil, vector.New(0), make(map[string]*ast.FuncDecl), make(map[string]*ast.FuncDecl)};
doc.types[name] = tdoc;
return tdoc;
}
@@ -136,7 +136,7 @@ func (doc *docReader) addValue(decl *ast.GenDecl) {
// typed entries are sufficiently frequent
typ := doc.lookupTypeDoc(domName);
if typ != nil {
- values = typ.values; // associate with that type
+ values = typ.values; // associate with that type
}
}
@@ -207,7 +207,7 @@ func (doc *docReader) addDecl(decl ast.Decl) {
// would lose GenDecl documentation if the TypeSpec
// has documentation as well.
doc.addType(&ast.GenDecl{d.Doc, d.Pos(), token.TYPE, noPos, []ast.Spec{spec}, noPos});
- // A new GenDecl node is created, no need to nil out d.Doc.
+ // A new GenDecl node is created, no need to nil out d.Doc.
}
}
}
@@ -228,8 +228,8 @@ func copyCommentList(list []*ast.Comment) []*ast.Comment {
var (
// Regexp constructor needs threads - cannot use init expressions
- bug_markers *regexp.Regexp;
- bug_content *regexp.Regexp;
+ bug_markers *regexp.Regexp;
+ bug_content *regexp.Regexp;
)
func makeRex(s string) *regexp.Regexp {
@@ -245,8 +245,8 @@ func makeRex(s string) *regexp.Regexp {
//
func (doc *docReader) addFile(src *ast.File) {
if bug_markers == nil {
- bug_markers = makeRex("^/[/*][ \t]*BUG\\(.*\\):[ \t]*"); // BUG(uid):
- bug_content = makeRex("[^ \n\r\t]+"); // at least one non-whitespace char
+ bug_markers = makeRex("^/[/*][ \t]*BUG\\(.*\\):[ \t]*"); // BUG(uid):
+ bug_content = makeRex("[^ \n\r\t]+"); // at least one non-whitespace char
}
// add package documentation
@@ -257,7 +257,7 @@ func (doc *docReader) addFile(src *ast.File) {
// comments correctly (but currently loses BUG(...)
// comments).
doc.doc = src.Doc;
- src.Doc = nil; // doc consumed - remove from ast.File node
+ src.Doc = nil; // doc consumed - remove from ast.File node
}
// add all declarations
@@ -271,15 +271,15 @@ func (doc *docReader) addFile(src *ast.File) {
cstr := string(text);
if m := bug_markers.ExecuteString(cstr); len(m) > 0 {
// found a BUG comment; maybe empty
- if bstr := cstr[m[1] : len(cstr)]; bug_content.MatchString(bstr) {
+ if bstr := cstr[m[1]:len(cstr)]; bug_content.MatchString(bstr) {
// non-empty BUG comment; collect comment without BUG prefix
list := copyCommentList(c.List);
- list[0].Text = text[m[1] : len(text)];
+ list[0].Text = text[m[1]:len(text)];
doc.bugs.Push(&ast.CommentGroup{list, nil});
}
}
}
- src.Comments = nil; // consumed unassociated comments - remove from ast.File node
+ src.Comments = nil; // consumed unassociated comments - remove from ast.File node
}
@@ -312,19 +312,24 @@ func NewPackageDoc(pkg *ast.Package, importpath string) *PackageDoc {
// values, either vars or consts.
//
type ValueDoc struct {
- Doc string;
- Decl *ast.GenDecl;
- order int;
+ Doc string;
+ Decl *ast.GenDecl;
+ order int;
}
type sortValueDoc []*ValueDoc
-func (p sortValueDoc) Len() int { return len(p); }
-func (p sortValueDoc) Swap(i, j int) { p[i], p[j] = p[j], p[i]; }
+
+func (p sortValueDoc) Len() int {
+ return len(p);
+}
+func (p sortValueDoc) Swap(i, j int) {
+ p[i], p[j] = p[j], p[i];
+}
func declName(d *ast.GenDecl) string {
if len(d.Specs) != 1 {
- return ""
+ return "";
}
switch v := d.Specs[0].(type) {
@@ -350,17 +355,17 @@ func (p sortValueDoc) Less(i, j int) bool {
func makeValueDocs(v *vector.Vector, tok token.Token) []*ValueDoc {
- d := make([]*ValueDoc, v.Len()); // big enough in any case
+ d := make([]*ValueDoc, v.Len()); // big enough in any case
n := 0;
for i := range d {
decl := v.At(i).(*ast.GenDecl);
if decl.Tok == tok {
d[n] = &ValueDoc{CommentText(decl.Doc), decl, i};
n++;
- decl.Doc = nil; // doc consumed - removed from AST
+ decl.Doc = nil; // doc consumed - removed from AST
}
}
- d = d[0 : n];
+ d = d[0:n];
sort.Sort(sortValueDoc(d));
return d;
}
@@ -370,25 +375,32 @@ func makeValueDocs(v *vector.Vector, tok token.Token) []*ValueDoc {
// either a top-level function or a method function.
//
type FuncDoc struct {
- Doc string;
- Recv ast.Expr; // TODO(rsc): Would like string here
- Name string;
- Decl *ast.FuncDecl;
+ Doc string;
+ Recv ast.Expr; // TODO(rsc): Would like string here
+ Name string;
+ Decl *ast.FuncDecl;
}
type sortFuncDoc []*FuncDoc
-func (p sortFuncDoc) Len() int { return len(p); }
-func (p sortFuncDoc) Swap(i, j int) { p[i], p[j] = p[j], p[i]; }
-func (p sortFuncDoc) Less(i, j int) bool { return p[i].Name < p[j].Name; }
+
+func (p sortFuncDoc) Len() int {
+ return len(p);
+}
+func (p sortFuncDoc) Swap(i, j int) {
+ p[i], p[j] = p[j], p[i];
+}
+func (p sortFuncDoc) Less(i, j int) bool {
+ return p[i].Name < p[j].Name;
+}
-func makeFuncDocs(m map[string] *ast.FuncDecl) []*FuncDoc {
+func makeFuncDocs(m map[string]*ast.FuncDecl) []*FuncDoc {
d := make([]*FuncDoc, len(m));
i := 0;
for _, f := range m {
doc := new(FuncDoc);
doc.Doc = CommentText(f.Doc);
- f.Doc = nil; // doc consumed - remove from ast.FuncDecl node
+ f.Doc = nil; // doc consumed - remove from ast.FuncDecl node
if f.Recv != nil {
doc.Recv = f.Recv.Type;
}
@@ -407,19 +419,24 @@ func makeFuncDocs(m map[string] *ast.FuncDecl) []*FuncDoc {
// Factories is a sorted list of factory functions that return that type.
// Methods is a sorted list of method functions on that type.
type TypeDoc struct {
- Doc string;
- Type *ast.TypeSpec;
- Consts []*ValueDoc;
- Vars []*ValueDoc;
- Factories []*FuncDoc;
- Methods []*FuncDoc;
- Decl *ast.GenDecl;
- order int;
+ Doc string;
+ Type *ast.TypeSpec;
+ Consts []*ValueDoc;
+ Vars []*ValueDoc;
+ Factories []*FuncDoc;
+ Methods []*FuncDoc;
+ Decl *ast.GenDecl;
+ order int;
}
type sortTypeDoc []*TypeDoc
-func (p sortTypeDoc) Len() int { return len(p); }
-func (p sortTypeDoc) Swap(i, j int) { p[i], p[j] = p[j], p[i]; }
+
+func (p sortTypeDoc) Len() int {
+ return len(p);
+}
+func (p sortTypeDoc) Swap(i, j int) {
+ p[i], p[j] = p[j], p[i];
+}
func (p sortTypeDoc) Less(i, j int) bool {
// sort by name
// pull blocks (name = "") up to top
@@ -434,7 +451,7 @@ func (p sortTypeDoc) Less(i, j int) bool {
// NOTE(rsc): This would appear not to be correct for type ( )
// blocks, but the doc extractor above has split them into
// individual declarations.
-func (doc *docReader) makeTypeDocs(m map[string] *typeDoc) []*TypeDoc {
+func (doc *docReader) makeTypeDocs(m map[string]*typeDoc) []*TypeDoc {
d := make([]*TypeDoc, len(m));
i := 0;
for _, old := range m {
@@ -445,12 +462,12 @@ func (doc *docReader) makeTypeDocs(m map[string] *typeDoc) []*TypeDoc {
typespec := decl.Specs[0].(*ast.TypeSpec);
t := new(TypeDoc);
doc := typespec.Doc;
- typespec.Doc = nil; // doc consumed - remove from ast.TypeSpec node
+ typespec.Doc = nil; // doc consumed - remove from ast.TypeSpec node
if doc == nil {
// no doc associated with the spec, use the declaration doc, if any
doc = decl.Doc;
}
- decl.Doc = nil; // doc consumed - remove from ast.Decl node
+ decl.Doc = nil; // doc consumed - remove from ast.Decl node
t.Doc = CommentText(doc);
t.Type = typespec;
t.Consts = makeValueDocs(old.values, token.CONST);
@@ -482,7 +499,7 @@ func (doc *docReader) makeTypeDocs(m map[string] *typeDoc) []*TypeDoc {
}
}
}
- d = d[0 : i]; // some types may have been ignored
+ d = d[0:i]; // some types may have been ignored
sort.Sort(sortTypeDoc(d));
return d;
}
@@ -500,16 +517,16 @@ func makeBugDocs(v *vector.Vector) []string {
// PackageDoc is the documentation for an entire package.
//
type PackageDoc struct {
- PackageName string;
- ImportPath string;
- FilePath string;
- Filenames []string;
- Doc string;
- Consts []*ValueDoc;
- Types []*TypeDoc;
- Vars []*ValueDoc;
- Funcs []*FuncDoc;
- Bugs []string;
+ PackageName string;
+ ImportPath string;
+ FilePath string;
+ Filenames []string;
+ Doc string;
+ Consts []*ValueDoc;
+ Types []*TypeDoc;
+ Vars []*ValueDoc;
+ Funcs []*FuncDoc;
+ Bugs []string;
}
@@ -544,11 +561,11 @@ func isRegexp(s string) bool {
for _, c := range s {
for _, m := range metachars {
if c == m {
- return true
+ return true;
}
}
}
- return false
+ return false;
}
@@ -594,7 +611,7 @@ func filterValueDocs(a []*ValueDoc, names []string) []*ValueDoc {
w++;
}
}
- return a[0 : w];
+ return a[0:w];
}
@@ -606,7 +623,7 @@ func filterFuncDocs(a []*FuncDoc, names []string) []*FuncDoc {
w++;
}
}
- return a[0 : w];
+ return a[0:w];
}
@@ -627,7 +644,7 @@ func filterTypeDocs(a []*TypeDoc, names []string) []*TypeDoc {
w++;
}
}
- return a[0 : w];
+ return a[0:w];
}
@@ -643,4 +660,3 @@ func (p *PackageDoc) Filter(names []string) {
p.Funcs = filterFuncDocs(p.Funcs, names);
p.Doc = ""; // don't show top-level package doc
}
-
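
The three filter hunks near the end of doc.go all finish with the same compact-in-place idiom: walk the slice, copy keepers forward with a write index w, then reslice to a[0:w]. A small sketch of that idiom on a plain string slice; the names and the prefix test are illustrative only, not part of the package.

package main

import (
	"fmt"
	"strings"
)

// keepWithPrefix filters a in place, keeping entries with the given prefix.
func keepWithPrefix(a []string, prefix string) []string {
	w := 0
	for _, s := range a {
		if strings.HasPrefix(s, prefix) {
			a[w] = s
			w++
		}
	}
	return a[0:w] // same backing array, shorter length
}

func main() {
	names := []string{"FuncA", "helper", "FuncB"}
	fmt.Println(keepWithPrefix(names, "Func")) // [FuncA FuncB]
}
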
diff --git a/src/pkg/gob/encode.go b/src/pkg/gob/encode.go
index 33a0111a0..1f9d32310 100644
--- a/src/pkg/gob/encode.go
+++ b/src/pkg/gob/encode.go
@@ -23,7 +23,7 @@ type encoderState struct {
b *bytes.Buffer;
err os.Error; // error encountered during encoding;
fieldnum int; // the last field number written.
- buf [1+uint64Size]byte; // buffer used by the encoder; here to avoid allocation.
+ buf [1 + uint64Size]byte; // buffer used by the encoder; here to avoid allocation.
}
// Unsigned integers have a two-state encoding. If the number is less
@@ -49,7 +49,7 @@ func encodeUint(state *encoderState, x uint64) {
m--;
}
state.buf[m] = uint8(-(n-1));
- n, state.err = state.b.Write(state.buf[m : uint64Size+1]);
+ n, state.err = state.b.Write(state.buf[m : uint64Size + 1]);
}
// encodeInt writes an encoded signed integer to state.w.
@@ -58,7 +58,7 @@ func encodeUint(state *encoderState, x uint64) {
func encodeInt(state *encoderState, i int64) {
var x uint64;
if i < 0 {
- x = uint64(^i << 1) | 1;
+ x = uint64(^i << 1)|1;
} else {
x = uint64(i<<1);
}
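
The encodeInt hunk only changes spacing around the or, but the line it touches is the interesting one: the sign is folded into the low bit (negatives become ^i<<1 | 1, non-negatives i<<1) so that values of small magnitude stay small before being handed to encodeUint. A round-trip sketch of that folding in current Go syntax; foldSign and unfoldSign are illustrative names, not the package's.

package main

import "fmt"

// foldSign puts the sign of i into the low bit, as encodeInt does above.
func foldSign(i int64) uint64 {
	if i < 0 {
		return uint64(^i<<1) | 1
	}
	return uint64(i << 1)
}

// unfoldSign reverses foldSign.
func unfoldSign(x uint64) int64 {
	if x&1 != 0 {
		return ^int64(x >> 1)
	}
	return int64(x >> 1)
}

func main() {
	for _, v := range []int64{0, 1, -1, 2, -2, 1 << 40} {
		x := foldSign(v)
		fmt.Println(v, "->", x, "->", unfoldSign(x))
	}
}
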
diff --git a/src/pkg/http/client.go b/src/pkg/http/client.go
index e5e45293e..6ac602f27 100644
--- a/src/pkg/http/client.go
+++ b/src/pkg/http/client.go
@@ -49,7 +49,7 @@ func (r *Response) AddHeader(key, value string) {
oldValues, oldValuesPresent := r.Header[key];
if oldValuesPresent {
- r.Header[key] = oldValues+","+value;
+ r.Header[key] = oldValues + "," + value;
} else {
r.Header[key] = value;
}
diff --git a/src/pkg/http/request.go b/src/pkg/http/request.go
index 7ca7f0dc6..5319c589c 100644
--- a/src/pkg/http/request.go
+++ b/src/pkg/http/request.go
@@ -21,25 +21,26 @@ import (
)
const (
- maxLineLength = 1024; // assumed < bufio.DefaultBufSize
- maxValueLength = 1024;
- maxHeaderLines = 1024;
- chunkSize = 4 << 10; // 4 KB chunks
+ maxLineLength = 1024; // assumed < bufio.DefaultBufSize
+ maxValueLength = 1024;
+ maxHeaderLines = 1024;
+ chunkSize = 4<<10; // 4 KB chunks
)
// HTTP request parsing errors.
type ProtocolError struct {
- os.ErrorString
+ os.ErrorString;
}
+
var (
- ErrLineTooLong = &ProtocolError{"header line too long"};
- ErrHeaderTooLong = &ProtocolError{"header too long"};
- ErrShortBody = &ProtocolError{"entity body too short"};
+ ErrLineTooLong = &ProtocolError{"header line too long"};
+ ErrHeaderTooLong = &ProtocolError{"header too long"};
+ ErrShortBody = &ProtocolError{"entity body too short"};
)
type badStringError struct {
- what string;
- str string;
+ what string;
+ str string;
}
func (e *badStringError) String() string {
@@ -48,12 +49,12 @@ func (e *badStringError) String() string {
// A Request represents a parsed HTTP request header.
type Request struct {
- Method string; // GET, POST, PUT, etc.
- RawUrl string; // The raw URL given in the request.
- Url *URL; // Parsed URL.
- Proto string; // "HTTP/1.0"
- ProtoMajor int; // 1
- ProtoMinor int; // 0
+ Method string; // GET, POST, PUT, etc.
+ RawUrl string; // The raw URL given in the request.
+ Url *URL; // Parsed URL.
+ Proto string; // "HTTP/1.0"
+ ProtoMajor int; // 1
+ ProtoMinor int; // 0
// A header mapping request lines to their values.
// If the header says
@@ -74,18 +75,18 @@ type Request struct {
// The request parser implements this by canonicalizing the
// name, making the first character and any characters
// following a hyphen uppercase and the rest lowercase.
- Header map[string] string;
+ Header map[string]string;
// The message body.
- Body io.Reader;
+ Body io.Reader;
// Whether to close the connection after replying to this request.
- Close bool;
+ Close bool;
// The host on which the URL is sought.
// Per RFC 2616, this is either the value of the Host: header
// or the host name given in the URL itself.
- Host string;
+ Host string;
// The referring URL, if sent in the request.
//
@@ -97,21 +98,20 @@ type Request struct {
// can diagnose programs that use the alternate
// (correct English) spelling req.Referrer but cannot
// diagnose programs that use Header["Referrer"].
- Referer string;
+ Referer string;
// The User-Agent: header string, if sent in the request.
- UserAgent string;
+ UserAgent string;
// The parsed form. Only available after ParseForm is called.
- Form map[string] []string;
-
+ Form map[string][]string;
}
// ProtoAtLeast returns whether the HTTP protocol used
// in the request is at least major.minor.
func (r *Request) ProtoAtLeast(major, minor int) bool {
return r.ProtoMajor > major ||
- r.ProtoMajor == major && r.ProtoMinor >= minor
+ r.ProtoMajor == major && r.ProtoMinor >= minor;
}
// Return value if nonempty, def otherwise.
@@ -123,7 +123,7 @@ func valueOrDefault(value, def string) string {
}
// TODO(rsc): Change default UserAgent before open-source release.
-const defaultUserAgent = "http.Client";
+const defaultUserAgent = "http.Client"
// Write an HTTP/1.1 request -- header and body -- in wire format.
// This method consults the following fields of req:
@@ -162,7 +162,7 @@ func (req *Request) write(w io.Writer) os.Error {
// Response.{GetHeader,AddHeader} and string constants for "Host",
// "User-Agent" and "Referer".
for k, v := range req.Header {
- io.WriteString(w, k + ": " + v + "\r\n");
+ io.WriteString(w, k+": "+v+"\r\n");
}
io.WriteString(w, "\r\n");
@@ -183,7 +183,7 @@ func (req *Request) write(w io.Writer) os.Error {
switch {
case er != nil:
if er == os.EOF {
- break Loop
+ break Loop;
}
return er;
case ew != nil:
@@ -210,29 +210,29 @@ func readLineBytes(b *bufio.Reader) (p []byte, err os.Error) {
if err == os.EOF {
err = io.ErrUnexpectedEOF;
}
- return nil, err
+ return nil, err;
}
if len(p) >= maxLineLength {
- return nil, ErrLineTooLong
+ return nil, ErrLineTooLong;
}
// Chop off trailing white space.
var i int;
for i = len(p); i > 0; i-- {
if c := p[i-1]; c != ' ' && c != '\r' && c != '\t' && c != '\n' {
- break
+ break;
}
}
- return p[0:i], nil
+ return p[0:i], nil;
}
// readLineBytes, but convert the bytes into a string.
func readLine(b *bufio.Reader) (s string, err os.Error) {
p, e := readLineBytes(b);
if e != nil {
- return "", e
+ return "", e;
}
- return string(p), nil
+ return string(p), nil;
}
var colon = []byte{':'}
@@ -244,10 +244,10 @@ var colon = []byte{':'}
func readKeyValue(b *bufio.Reader) (key, value string, err os.Error) {
line, e := readLineBytes(b);
if e != nil {
- return "", "", e
+ return "", "", e;
}
if len(line) == 0 {
- return "", "", nil
+ return "", "", nil;
}
// Scan first line for colon.
@@ -265,7 +265,7 @@ func readKeyValue(b *bufio.Reader) (key, value string, err os.Error) {
// Skip initial space before value.
for i++; i < len(line); i++ {
if line[i] != ' ' {
- break
+ break;
}
}
value = string(line[i:len(line)]);
@@ -286,16 +286,16 @@ func readKeyValue(b *bufio.Reader) (key, value string, err os.Error) {
if e == os.EOF {
e = io.ErrUnexpectedEOF;
}
- return "", "", e
+ return "", "", e;
}
}
b.UnreadByte();
// Read the rest of the line and add to value.
if line, e = readLineBytes(b); e != nil {
- return "", "", e
+ return "", "", e;
}
- value += " " + string(line);
+ value += " "+string(line);
if len(value) >= maxValueLength {
return "", "", &badStringError{"value too long for key", key};
@@ -313,33 +313,33 @@ Malformed:
func atoi(s string, i int) (n, i1 int, ok bool) {
const Big = 1000000;
if i >= len(s) || s[i] < '0' || s[i] > '9' {
- return 0, 0, false
+ return 0, 0, false;
}
n = 0;
for ; i < len(s) && '0' <= s[i] && s[i] <= '9'; i++ {
n = n*10 + int(s[i]-'0');
if n > Big {
- return 0, 0, false
+ return 0, 0, false;
}
}
- return n, i, true
+ return n, i, true;
}
// Parse HTTP version: "HTTP/1.2" -> (1, 2, true).
func parseHTTPVersion(vers string) (int, int, bool) {
if vers[0:5] != "HTTP/" {
- return 0, 0, false
+ return 0, 0, false;
}
major, i, ok := atoi(vers, 5);
if !ok || i >= len(vers) || vers[i] != '.' {
- return 0, 0, false
+ return 0, 0, false;
}
var minor int;
minor, i, ok = atoi(vers, i+1);
if !ok || i != len(vers) {
- return 0, 0, false
+ return 0, 0, false;
}
- return major, minor, true
+ return major, minor, true;
}
var cmap = make(map[string]string)
@@ -360,12 +360,12 @@ func CanonicalHeaderKey(s string) string {
// HTTP headers are ASCII only, so no Unicode issues.
a := strings.Bytes(s);
upper := true;
- for i,v := range a {
+ for i, v := range a {
if upper && 'a' <= v && v <= 'z' {
- a[i] = v + 'A' - 'a';
+ a[i] = v+'A'-'a';
}
if !upper && 'A' <= v && v <= 'Z' {
- a[i] = v + 'a' - 'A';
+ a[i] = v+'a'-'A';
}
upper = false;
if v == '-' {
@@ -378,13 +378,13 @@ func CanonicalHeaderKey(s string) string {
}
type chunkedReader struct {
- r *bufio.Reader;
- n uint64; // unread bytes in chunk
- err os.Error;
+ r *bufio.Reader;
+ n uint64; // unread bytes in chunk
+ err os.Error;
}
func newChunkedReader(r *bufio.Reader) *chunkedReader {
- return &chunkedReader{ r: r }
+ return &chunkedReader{r: r};
}
func (cr *chunkedReader) beginChunk() {
@@ -392,21 +392,21 @@ func (cr *chunkedReader) beginChunk() {
var line string;
line, cr.err = readLine(cr.r);
if cr.err != nil {
- return
+ return;
}
cr.n, cr.err = strconv.Btoui64(line, 16);
if cr.err != nil {
- return
+ return;
}
if cr.n == 0 {
// trailer CRLF
for {
line, cr.err = readLine(cr.r);
if cr.err != nil {
- return
+ return;
}
if line == "" {
- break
+ break;
}
}
cr.err = os.EOF;
@@ -415,16 +415,16 @@ func (cr *chunkedReader) beginChunk() {
func (cr *chunkedReader) Read(b []uint8) (n int, err os.Error) {
if cr.err != nil {
- return 0, cr.err
+ return 0, cr.err;
}
if cr.n == 0 {
cr.beginChunk();
if cr.err != nil {
- return 0, cr.err
+ return 0, cr.err;
}
}
if uint64(len(b)) > cr.n {
- b = b[0:cr.n];
+ b = b[0 : cr.n];
}
n, cr.err = cr.r.Read(b);
cr.n -= uint64(n);
@@ -437,7 +437,7 @@ func (cr *chunkedReader) Read(b []uint8) (n int, err os.Error) {
}
}
}
- return n, cr.err
+ return n, cr.err;
}
// ReadRequest reads and parses a request from b.
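
The chunkedReader hunks above mostly swap bare returns for explicit ones. For orientation, the wire format being decoded is: a hex chunk-size line, that many bytes of data, a CRLF, repeated until a zero-size chunk ends the body. A stripped-down decoder in current Go syntax; unlike the code above it ignores trailer headers and line-length limits, and readChunked is an illustrative name only.

package main

import (
	"bufio"
	"fmt"
	"io"
	"strconv"
	"strings"
)

// readChunked decodes a chunked body: size line in hex, data, CRLF, repeat.
func readChunked(r *bufio.Reader) (string, error) {
	var body strings.Builder
	for {
		line, err := r.ReadString('\n')
		if err != nil {
			return "", err
		}
		n, err := strconv.ParseUint(strings.TrimSpace(line), 16, 64)
		if err != nil {
			return "", err
		}
		if n == 0 {
			return body.String(), nil // the real reader also consumes the trailer
		}
		if _, err := io.CopyN(&body, r, int64(n)); err != nil {
			return "", err
		}
		r.ReadString('\n') // consume the CRLF after the chunk data
	}
}

func main() {
	wire := "5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n"
	s, err := readChunked(bufio.NewReader(strings.NewReader(wire)))
	fmt.Println(s, err) // hello world <nil>
}
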
@@ -447,7 +447,7 @@ func ReadRequest(b *bufio.Reader) (req *Request, err os.Error) {
// First line: GET /index.html HTTP/1.0
var s string;
if s, err = readLine(b); err != nil {
- return nil, err
+ return nil, err;
}
var f []string;
@@ -461,22 +461,22 @@ func ReadRequest(b *bufio.Reader) (req *Request, err os.Error) {
}
if req.Url, err = ParseURL(req.RawUrl); err != nil {
- return nil, err
+ return nil, err;
}
// Subsequent lines: Key: value.
nheader := 0;
- req.Header = make(map[string] string);
+ req.Header = make(map[string]string);
for {
var key, value string;
if key, value, err = readKeyValue(b); err != nil {
- return nil, err
+ return nil, err;
}
if key == "" {
- break
+ break;
}
if nheader++; nheader >= maxHeaderLines {
- return nil, ErrHeaderTooLong
+ return nil, ErrHeaderTooLong;
}
key = CanonicalHeaderKey(key);
@@ -486,9 +486,9 @@ func ReadRequest(b *bufio.Reader) (req *Request, err os.Error) {
// to concatenating the values separated by commas.
oldvalue, present := req.Header[key];
if present {
- req.Header[key] = oldvalue+","+value
+ req.Header[key] = oldvalue+","+value;
} else {
- req.Header[key] = value
+ req.Header[key] = value;
}
}
@@ -500,7 +500,7 @@ func ReadRequest(b *bufio.Reader) (req *Request, err os.Error) {
// Host: doesntmatter
// the same. In the second case, any Host line is ignored.
if v, present := req.Header["Host"]; present && req.Url.Host == "" {
- req.Host = v
+ req.Host = v;
}
// RFC2616: Should treat
@@ -509,27 +509,27 @@ func ReadRequest(b *bufio.Reader) (req *Request, err os.Error) {
// Cache-Control: no-cache
if v, present := req.Header["Pragma"]; present && v == "no-cache" {
if _, presentcc := req.Header["Cache-Control"]; !presentcc {
- req.Header["Cache-Control"] = "no-cache"
+ req.Header["Cache-Control"] = "no-cache";
}
}
// Determine whether to hang up after sending the reply.
if req.ProtoMajor < 1 || (req.ProtoMajor == 1 && req.ProtoMinor < 1) {
- req.Close = true
+ req.Close = true;
} else if v, present := req.Header["Connection"]; present {
// TODO: Should split on commas, toss surrounding white space,
// and check each field.
if v == "close" {
- req.Close = true
+ req.Close = true;
}
}
// Pull out useful fields as a convenience to clients.
if v, present := req.Header["Referer"]; present {
- req.Referer = v
+ req.Referer = v;
}
if v, present := req.Header["User-Agent"]; present {
- req.UserAgent = v
+ req.UserAgent = v;
}
// TODO: Parse specific header values:
@@ -571,16 +571,16 @@ func ReadRequest(b *bufio.Reader) (req *Request, err os.Error) {
raw := make([]byte, length);
n, err := b.Read(raw);
if err != nil || uint64(n) < length {
- return nil, ErrShortBody
+ return nil, ErrShortBody;
}
req.Body = bytes.NewBuffer(raw);
}
- return req, nil
+ return req, nil;
}
-func parseForm(query string) (m map[string] []string, err os.Error) {
- data := make(map[string] *vector.StringVector);
+func parseForm(query string) (m map[string][]string, err os.Error) {
+ data := make(map[string]*vector.StringVector);
for _, kv := range strings.Split(query, "&", 0) {
kvPair := strings.Split(kv, "=", 2);
@@ -602,19 +602,19 @@ func parseForm(query string) (m map[string] []string, err os.Error) {
vec.Push(value);
}
- m = make(map[string] []string);
+ m = make(map[string][]string);
for k, vec := range data {
m[k] = vec.Data();
}
- return
+ return;
}
// ParseForm parses the request body as a form for POST requests, or the raw query for GET requests.
// It is idempotent.
func (r *Request) ParseForm() (err os.Error) {
if r.Form != nil {
- return
+ return;
}
var query string;
@@ -624,23 +624,23 @@ func (r *Request) ParseForm() (err os.Error) {
query = r.Url.RawQuery;
case "POST":
if r.Body == nil {
- return os.ErrorString("missing form body")
+ return os.ErrorString("missing form body");
}
ct, _ := r.Header["Content-Type"];
switch strings.Split(ct, ";", 2)[0] {
case "text/plain", "application/x-www-form-urlencoded", "":
var b []byte;
if b, err = io.ReadAll(r.Body); err != nil {
- return
+ return;
}
query = string(b);
// TODO(dsymonds): Handle multipart/form-data
default:
- return &badStringError{"unknown Content-Type", ct}
+ return &badStringError{"unknown Content-Type", ct};
}
}
r.Form, err = parseForm(query);
- return
+ return;
}
// FormValue returns the first value for the named component of the query.
@@ -650,7 +650,7 @@ func (r *Request) FormValue(key string) string {
r.ParseForm();
}
if vs, ok := r.Form[key]; ok && len(vs) > 0 {
- return vs[0]
+ return vs[0];
}
- return ""
+ return "";
}
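
The CanonicalHeaderKey hunk in this file is spacing-only; the rule it implements is worth spelling out: uppercase the first letter and any letter that follows a hyphen, lowercase the rest (ASCII only, as the surrounding comment notes). A standalone sketch of that rule in current Go syntax; canonicalKey is an illustrative name.

package main

import "fmt"

// canonicalKey canonicalizes an HTTP header name: Content-Type, User-Agent, ...
func canonicalKey(s string) string {
	a := []byte(s)
	upper := true
	for i, v := range a {
		if upper && 'a' <= v && v <= 'z' {
			a[i] = v + 'A' - 'a'
		}
		if !upper && 'A' <= v && v <= 'Z' {
			a[i] = v + 'a' - 'A'
		}
		upper = false
		if v == '-' {
			upper = true
		}
	}
	return string(a)
}

func main() {
	fmt.Println(canonicalKey("content-TYPE")) // Content-Type
	fmt.Println(canonicalKey("user-agent"))   // User-Agent
}
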
diff --git a/src/pkg/http/server.go b/src/pkg/http/server.go
index 5e8a67f51..e131213df 100644
--- a/src/pkg/http/server.go
+++ b/src/pkg/http/server.go
@@ -146,7 +146,7 @@ func (c *Conn) WriteHeader(code int) {
if !ok {
text = "status code " + codestring;
}
- io.WriteString(c.buf, proto+" "+codestring+" "+text+"\r\n");
+ io.WriteString(c.buf, proto + " " + codestring + " " + text + "\r\n");
for k, v := range c.header {
io.WriteString(c.buf, k+": "+v+"\r\n");
}
diff --git a/src/pkg/image/png/reader.go b/src/pkg/image/png/reader.go
index 8e6ae489b..ca0070419 100644
--- a/src/pkg/image/png/reader.go
+++ b/src/pkg/image/png/reader.go
@@ -203,7 +203,7 @@ func (d *decoder) idatReader(idat io.Reader) os.Error {
case ctPaletted:
bpp = 1;
paletted = d.image.(*image.Paletted);
- maxPalette = uint8(len(paletted.Palette) - 1);
+ maxPalette = uint8(len(paletted.Palette)-1);
case ctTrueColorAlpha:
bpp = 4;
nrgba = d.image.(*image.NRGBA);
diff --git a/src/pkg/io/pipe.go b/src/pkg/io/pipe.go
index 5cb60bd85..ff52988b2 100644
--- a/src/pkg/io/pipe.go
+++ b/src/pkg/io/pipe.go
@@ -54,7 +54,7 @@ func (p *pipe) Read(data []byte) (n int, err os.Error) {
data[i] = p.wpend[i];
}
p.wtot += n;
- p.wpend = p.wpend[n : len(p.wpend)];
+ p.wpend = p.wpend[n:len(p.wpend)];
// If write block is done, finish the write.
if len(p.wpend) == 0 {
diff --git a/src/pkg/log/log.go b/src/pkg/log/log.go
index f29359ee3..80c72a6bc 100644
--- a/src/pkg/log/log.go
+++ b/src/pkg/log/log.go
@@ -75,7 +75,7 @@ func itoa(i int, wid int) string {
for ; u > 0 || wid > 0; u /= 10 {
bp--;
wid--;
- b[bp] = byte(u%10) + '0';
+ b[bp] = byte(u%10)+'0';
}
return string(b[bp:len(b)]);
@@ -91,12 +91,12 @@ func (l *Logger) formatHeader(ns int64, calldepth int) string {
if l.flag & (Ltime | Lmicroseconds) != 0 {
h += itoa(t.Hour, 2) + ":" + itoa(t.Minute, 2) + ":" + itoa(t.Second, 2);
if l.flag & Lmicroseconds != 0 {
- h += "." + itoa(int(ns%1e9) / 1e3, 6);
+ h += "." + itoa(int(ns%1e9)/1e3, 6);
}
h += " ";
}
}
- if l.flag & (Lshortfile|Llongfile) != 0 {
+ if l.flag & (Lshortfile | Llongfile) != 0 {
_, file, line, ok := runtime.Caller(calldepth);
if ok {
if l.flag & Lshortfile != 0 {
@@ -131,7 +131,7 @@ func (l *Logger) Output(calldepth int, s string) {
if len(s) > 0 && s[len(s)-1] == '\n' {
newline = "";
}
- s = l.formatHeader(now, calldepth+1) + s + newline;
+ s = l.formatHeader(now, calldepth + 1) + s + newline;
io.WriteString(l.out0, s);
if l.out1 != nil {
io.WriteString(l.out1, s);
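
The formatHeader hunks adjust spacing inside flag masks; the flags themselves are single bits combined with | and tested with &, with Lmicroseconds implying Ltime and Lshortfile overriding Llongfile, exactly as the test table in the next file spells out. A tiny sketch of the same bit tests; the constant values are stand-ins, not necessarily the package's.

package main

import "fmt"

const (
	Ldate = 1 << iota
	Ltime
	Lmicroseconds
	Llongfile
	Lshortfile
)

func describe(flag int) string {
	s := ""
	if flag&Ldate != 0 {
		s += "date "
	}
	if flag&(Ltime|Lmicroseconds) != 0 { // microseconds implies time
		s += "time "
	}
	if flag&(Lshortfile|Llongfile) != 0 {
		if flag&Lshortfile != 0 { // shortfile overrides longfile
			s += "shortfile"
		} else {
			s += "longfile"
		}
	}
	return s
}

func main() {
	fmt.Println(describe(Ldate | Lmicroseconds | Llongfile | Lshortfile))
	// prints: date time shortfile
}
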
diff --git a/src/pkg/log/log_test.go b/src/pkg/log/log_test.go
index 540a33891..9c28405dc 100644
--- a/src/pkg/log/log_test.go
+++ b/src/pkg/log/log_test.go
@@ -36,9 +36,9 @@ var tests = []tester{
tester{Lok|Ltime, "", Rtime+" "},
tester{Lok | Ltime | Lmicroseconds, "", Rtime + Rmicroseconds + " "},
tester{Lok | Lmicroseconds, "", Rtime + Rmicroseconds + " "}, // microsec implies time
- tester{Lok|Llongfile, "", Rlongfile+" "},
- tester{Lok|Lshortfile, "", Rshortfile+" "},
- tester{Lok|Llongfile|Lshortfile, "", Rshortfile+" "}, // shortfile overrides longfile
+ tester{Lok | Llongfile, "", Rlongfile + " "},
+ tester{Lok | Lshortfile, "", Rshortfile + " "},
+ tester{Lok | Llongfile | Lshortfile, "", Rshortfile + " "}, // shortfile overrides longfile
// everything at once:
tester{Lok | Ldate | Ltime | Lmicroseconds | Llongfile, "XXX", "XXX" + Rdate + " " + Rtime + Rmicroseconds + " " + Rlongfile + " "},
tester{Lok | Ldate | Ltime | Lmicroseconds | Lshortfile, "XXX", "XXX" + Rdate + " " + Rtime + Rmicroseconds + " " + Rshortfile + " "},
diff --git a/src/pkg/math/pow10.go b/src/pkg/math/pow10.go
index 72a6121cb..b9b074bb4 100644
--- a/src/pkg/math/pow10.go
+++ b/src/pkg/math/pow10.go
@@ -18,13 +18,13 @@ var pow10tab [70]float64
// Pow10 returns 10**x, the base-10 exponential of x.
func Pow10(e int) float64 {
if e < 0 {
- return 1 / Pow10(-e);
+ return 1/Pow10(-e);
}
if e < len(pow10tab) {
return pow10tab[e];
}
m := e/2;
- return Pow10(m) * Pow10(e-m);
+ return Pow10(m)*Pow10(e-m);
}
func init() {
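
The Pow10 hunk removes spaces around / and *; the function itself is a small recursion: non-negative exponents inside the table come from precomputed values, larger ones are split as Pow10(e/2)*Pow10(e-e/2), and negative ones use the reciprocal. A compressed sketch with a shorter table (the real pow10tab has 70 entries and is filled in by init):

package main

import "fmt"

var pow10tab = [16]float64{1, 1e1, 1e2, 1e3, 1e4, 1e5, 1e6, 1e7,
	1e8, 1e9, 1e10, 1e11, 1e12, 1e13, 1e14, 1e15}

// pow10 mirrors the shape of Pow10 above: table hit, split, or reciprocal.
func pow10(e int) float64 {
	if e < 0 {
		return 1 / pow10(-e)
	}
	if e < len(pow10tab) {
		return pow10tab[e]
	}
	m := e / 2
	return pow10(m) * pow10(e-m)
}

func main() {
	fmt.Println(pow10(3), pow10(20), pow10(-2)) // 1000 1e+20 0.01
}
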
diff --git a/src/pkg/math/sinh.go b/src/pkg/math/sinh.go
index 255ea2851..6974b5986 100644
--- a/src/pkg/math/sinh.go
+++ b/src/pkg/math/sinh.go
@@ -42,7 +42,7 @@ func Sinh(x float64) float64 {
temp = Exp(x)/2;
case x > 0.5:
- temp = (Exp(x) - Exp(-x))/2;
+ temp = (Exp(x)-Exp(-x))/2;
default:
sq := x*x;
@@ -64,5 +64,5 @@ func Cosh(x float64) float64 {
if x > 21 {
return Exp(x)/2;
}
- return (Exp(x) + Exp(-x))/2;
+ return (Exp(x)+Exp(-x))/2;
}
diff --git a/src/pkg/math/sqrt.go b/src/pkg/math/sqrt.go
index 49ebc6c76..b7fd4ee60 100644
--- a/src/pkg/math/sqrt.go
+++ b/src/pkg/math/sqrt.go
@@ -43,18 +43,18 @@ func Sqrt(x float64) float64 {
temp := 0.5*(1+y);
for exp > 60 {
- temp = temp * float64(1<<30);
+ temp = temp*float64(1<<30);
exp = exp-60;
}
for exp < -60 {
- temp = temp / float64(1<<30);
+ temp = temp/float64(1<<30);
exp = exp+60;
}
if exp >= 0 {
- exp = 1 << uint(exp/2);
+ exp = 1<<uint(exp/2);
temp = temp*float64(exp);
} else {
- exp = 1 << uint(-exp / 2);
+ exp = 1<<uint(-exp / 2);
temp = temp/float64(exp);
}
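
The Sqrt hunks are spacing-only, but the loop they touch deserves a gloss: the result has to be scaled by 2**(exp/2), and because exp can be far larger than a safe shift count, it is peeled off 60 at a time (each pass multiplies or divides by 2**30) before the closing 1<<uint(exp/2). A self-contained sketch of just that scaling step; scaleByExp is an illustrative name and nothing else of Sqrt is reproduced.

package main

import "fmt"

// scaleByExp multiplies temp by 2**(exp/2), reducing exp in chunks of 60
// so the final shift stays well inside the integer range.
func scaleByExp(temp float64, exp int) float64 {
	for exp > 60 {
		temp = temp * float64(1<<30)
		exp = exp - 60
	}
	for exp < -60 {
		temp = temp / float64(1<<30)
		exp = exp + 60
	}
	if exp >= 0 {
		return temp * float64(int64(1)<<uint(exp/2))
	}
	return temp / float64(int64(1)<<uint(-exp/2))
}

func main() {
	fmt.Println(scaleByExp(1, 120)) // 2**60 = 1.152921504606847e+18
}
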
diff --git a/src/pkg/os/exec.go b/src/pkg/os/exec.go
index ac158c8d3..a94a2a842 100644
--- a/src/pkg/os/exec.go
+++ b/src/pkg/os/exec.go
@@ -111,7 +111,7 @@ func itod(i int) string {
bp := len(b);
for ; u > 0; u /= 10 {
bp--;
- b[bp] = byte(u%10) + '0';
+ b[bp] = byte(u%10)+'0';
}
if i < 0 {
diff --git a/src/pkg/os/path_test.go b/src/pkg/os/path_test.go
index cfdd64fc0..617d1e7f4 100644
--- a/src/pkg/os/path_test.go
+++ b/src/pkg/os/path_test.go
@@ -25,7 +25,7 @@ func TestMkdirAll(t *testing.T) {
}
// Make file.
- fpath := path + "/file";
+ fpath := path+"/file";
_, err = Open(fpath, O_WRONLY|O_CREAT, 0666);
if err != nil {
t.Fatalf("create %q: %s", fpath, err);
@@ -64,7 +64,7 @@ func TestMkdirAll(t *testing.T) {
func TestRemoveAll(t *testing.T) {
// Work directory.
path := "_obj/_TestRemoveAll_";
- fpath := path + "/file";
+ fpath := path+"/file";
dpath := path+"/dir";
// Make directory with 1 file and remove.
@@ -92,7 +92,7 @@ func TestRemoveAll(t *testing.T) {
t.Fatalf("create %q: %s", fpath, err);
}
fd.Close();
- fd, err = Open(dpath + "/file", O_WRONLY|O_CREAT, 0666);
+ fd, err = Open(dpath+"/file", O_WRONLY|O_CREAT, 0666);
if err != nil {
t.Fatalf("create %q: %s", fpath, err);
}
@@ -109,7 +109,7 @@ func TestRemoveAll(t *testing.T) {
t.Fatalf("MkdirAll %q: %s", dpath, err);
}
- for _, s := range []string{fpath, dpath + "/file1", path+"/zzz"} {
+ for _, s := range []string{fpath, dpath+"/file1", path+"/zzz"} {
fd, err = Open(s, O_WRONLY|O_CREAT, 0666);
if err != nil {
t.Fatalf("create %q: %s", s, err);
diff --git a/src/pkg/os/stat_nacl_386.go b/src/pkg/os/stat_nacl_386.go
index 67b2ba8c8..e36d3f9a2 100644
--- a/src/pkg/os/stat_nacl_386.go
+++ b/src/pkg/os/stat_nacl_386.go
@@ -27,9 +27,9 @@ func dirFromStat(name string, dir *Dir, lstat, stat *syscall.Stat_t) *Dir {
dir.Size = uint64(stat.Size);
dir.Blksize = uint64(stat.Blksize);
dir.Blocks = uint64(stat.Blocks);
- dir.Atime_ns = uint64(stat.Atime) * 1e9;
- dir.Mtime_ns = uint64(stat.Mtime) * 1e9;
- dir.Ctime_ns = uint64(stat.Ctime) * 1e9;
+ dir.Atime_ns = uint64(stat.Atime)*1e9;
+ dir.Mtime_ns = uint64(stat.Mtime)*1e9;
+ dir.Ctime_ns = uint64(stat.Ctime)*1e9;
for i := len(name)-1; i >= 0; i-- {
if name[i] == '/' {
name = name[i+1 : len(name)];
diff --git a/src/pkg/os/time.go b/src/pkg/os/time.go
index f5c2880a1..21d4df733 100644
--- a/src/pkg/os/time.go
+++ b/src/pkg/os/time.go
@@ -16,5 +16,5 @@ func Time() (sec int64, nsec int64, err Error) {
if errno := syscall.Gettimeofday(&tv); errno != 0 {
return 0, 0, NewSyscallError("gettimeofday", errno);
}
- return int64(tv.Sec), int64(tv.Usec) * 1000, err;
+ return int64(tv.Sec), int64(tv.Usec)*1000, err;
}
diff --git a/src/pkg/path/path.go b/src/pkg/path/path.go
index 49ea25db6..7fa8b863b 100644
--- a/src/pkg/path/path.go
+++ b/src/pkg/path/path.go
@@ -27,7 +27,7 @@ import "strings"
// http://plan9.bell-labs.com/sys/doc/lexnames.html
func Clean(path string) string {
if path == "" {
- return "."
+ return ".";
}
rooted := path[0] == '/';
@@ -105,10 +105,10 @@ func Clean(path string) string {
func Split(path string) (dir, file string) {
for i := len(path)-1; i >= 0; i-- {
if path[i] == '/' {
- return path[0:i+1], path[i+1:len(path)];
+ return path[0 : i+1], path[i+1 : len(path)];
}
}
- return "", path
+ return "", path;
}
// Join joins dir and file into a single path, adding a separating
@@ -117,7 +117,7 @@ func Join(dir, file string) string {
if dir == "" {
return file;
}
- return Clean(dir + "/" + file);
+ return Clean(dir+"/"+file);
}
// Ext returns the file name extension used by path.
@@ -130,6 +130,5 @@ func Ext(path string) string {
return path[i:len(path)];
}
}
- return ""
+ return "";
}
-
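
The path.go hunks are formatting and explicit-return changes only. For readers skimming the patch, the three functions touched behave as follows; this usage sketch runs against today's path package, which kept the same semantics.

package main

import (
	"fmt"
	"path"
)

func main() {
	dir, file := path.Split("a/b/c.go")
	fmt.Println(dir, file)                // a/b/ c.go
	fmt.Println(path.Join("a/b", "c.go")) // a/b/c.go
	fmt.Println(path.Ext("a.dir/b.go"))   // .go
	fmt.Println(path.Ext("a.dir/b"))      // empty: no dot after the last slash
}
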
diff --git a/src/pkg/path/path_test.go b/src/pkg/path/path_test.go
index 22e0b4d39..c6f18e595 100644
--- a/src/pkg/path/path_test.go
+++ b/src/pkg/path/path_test.go
@@ -5,14 +5,14 @@
package path
import (
- "testing"
+ "testing";
)
type CleanTest struct {
- path, clean string
+ path, clean string;
}
-var cleantests = []CleanTest {
+var cleantests = []CleanTest{
// Already clean
CleanTest{"", "."},
CleanTest{"abc", "abc"},
@@ -71,10 +71,10 @@ func TestClean(t *testing.T) {
}
type SplitTest struct {
- path, dir, file string
+ path, dir, file string;
}
-var splittests = []SplitTest {
+var splittests = []SplitTest{
SplitTest{"a/b", "a/", "b"},
SplitTest{"a/b/", "a/b/", ""},
SplitTest{"a/", "a/", ""},
@@ -91,10 +91,10 @@ func TestSplit(t *testing.T) {
}
type JoinTest struct {
- dir, file, path string
+ dir, file, path string;
}
-var jointests = []JoinTest {
+var jointests = []JoinTest{
JoinTest{"a", "b", "a/b"},
JoinTest{"a", "", "a"},
JoinTest{"", "b", "b"},
@@ -113,10 +113,10 @@ func TestJoin(t *testing.T) {
}
type ExtTest struct {
- path, ext string
+ path, ext string;
}
-var exttests = []ExtTest {
+var exttests = []ExtTest{
ExtTest{"path.go", ".go"},
ExtTest{"path.pb.go", ".go"},
ExtTest{"a.dir/b", ""},
@@ -131,4 +131,3 @@ func TestExt(t *testing.T) {
}
}
}
-
diff --git a/src/pkg/regexp/all_test.go b/src/pkg/regexp/all_test.go
index b3df8bb50..2cd2fecf7 100644
--- a/src/pkg/regexp/all_test.go
+++ b/src/pkg/regexp/all_test.go
@@ -470,7 +470,7 @@ func TestAllMatches(t *testing.T) {
switch c.matchfunc {
case "matchit":
- result = make([]string, len(c.input) + 1);
+ result = make([]string, len(c.input)+1);
i := 0;
b := strings.Bytes(c.input);
for match := range re.AllMatchesIter(b, c.n) {
@@ -479,7 +479,7 @@ func TestAllMatches(t *testing.T) {
}
result = result[0:i];
case "stringmatchit":
- result = make([]string, len(c.input) + 1);
+ result = make([]string, len(c.input)+1);
i := 0;
for match := range re.AllMatchesStringIter(c.input, c.n) {
result[i] = match;
@@ -487,7 +487,7 @@ func TestAllMatches(t *testing.T) {
}
result = result[0:i];
case "match":
- result = make([]string, len(c.input) + 1);
+ result = make([]string, len(c.input)+1);
b := strings.Bytes(c.input);
i := 0;
for _, match := range re.AllMatches(b, c.n) {
diff --git a/src/pkg/strconv/atof.go b/src/pkg/strconv/atof.go
index 3b0562391..3202978d8 100644
--- a/src/pkg/strconv/atof.go
+++ b/src/pkg/strconv/atof.go
@@ -265,11 +265,11 @@ func decimalAtof64(neg bool, d *decimal, trunc bool) (f float64, ok bool) {
f *= float64pow10[k-22];
k = 22;
}
- return f*float64pow10[k], true;
+ return f * float64pow10[k], true;
case d.dp < d.nd && d.nd - d.dp <= 22: // int / 10^k
f := decimalAtof64Int(neg, d);
- return f/float64pow10[d.nd - d.dp], true;
+ return f / float64pow10[d.nd - d.dp], true;
}
return;
}
@@ -296,11 +296,11 @@ func decimalAtof32(neg bool, d *decimal, trunc bool) (f float32, ok bool) {
f *= float32pow10[k-10];
k = 10;
}
- return f*float32pow10[k], true;
+ return f * float32pow10[k], true;
case d.dp < d.nd && d.nd - d.dp <= 10: // int / 10^k
f := decimalAtof32Int(neg, d);
- return f/float32pow10[d.nd - d.dp], true;
+ return f / float32pow10[d.nd - d.dp], true;
}
return;
}
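
The atof.go hunks restore spaces around the multiply and divide on the exact fast paths. Those paths are exact because small decimal integers and powers of ten up to 1e22 (1e10 for float32) are exactly representable, so a single multiplication or division introduces no rounding of its own. A quick check of that claim for one input, using only the standard strconv API.

package main

import (
	"fmt"
	"strconv"
)

func main() {
	// 123456 and 1000 are both exact float64 values, so "123456e3"
	// can be computed as one exact multiply: 123456 * 1e3.
	f, err := strconv.ParseFloat("123456e3", 64)
	fmt.Println(f, f == 123456*1000, err) // 1.23456e+08 true <nil>
}
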
diff --git a/src/pkg/strconv/decimal_test.go b/src/pkg/strconv/decimal_test.go
index 443e057e8..470a81e2c 100644
--- a/src/pkg/strconv/decimal_test.go
+++ b/src/pkg/strconv/decimal_test.go
@@ -21,7 +21,7 @@ var shifttests = []shiftTest{
shiftTest{1, 100, "1267650600228229401496703205376"},
shiftTest{1, -100,
"0.00000000000000000000000000000078886090522101180541"
- "17285652827862296732064351090230047702789306640625",
+ "17285652827862296732064351090230047702789306640625",
},
shiftTest{12345678, 8, "3160493568"},
shiftTest{12345678, -8, "48225.3046875"},
diff --git a/src/pkg/strconv/fp_test.go b/src/pkg/strconv/fp_test.go
index 6f25acf78..47bf6231f 100644
--- a/src/pkg/strconv/fp_test.go
+++ b/src/pkg/strconv/fp_test.go
@@ -16,13 +16,13 @@ import (
func pow2(i int) float64 {
switch {
case i < 0:
- return 1 / pow2(-i);
+ return 1/pow2(-i);
case i == 0:
return 1;
case i == 1:
return 2;
}
- return pow2(i/2) * pow2(i - i/2);
+ return pow2(i/2)*pow2(i - i/2);
}
// Wrapper around strconv.Atof64. Handles dddddp+ddd (binary exponent)
diff --git a/src/pkg/strconv/ftoa.go b/src/pkg/strconv/ftoa.go
index 55e618881..0f3f50fe7 100644
--- a/src/pkg/strconv/ftoa.go
+++ b/src/pkg/strconv/ftoa.go
@@ -14,12 +14,13 @@ import "math"
// TODO: move elsewhere?
type floatInfo struct {
- mantbits uint;
- expbits uint;
- bias int;
+ mantbits uint;
+ expbits uint;
+ bias int;
}
-var float32info = floatInfo{ 23, 8, -127 }
-var float64info = floatInfo{ 52, 11, -1023 }
+
+var float32info = floatInfo{23, 8, -127}
+var float64info = floatInfo{52, 11, -1023}
func floatsize() int {
// Figure out whether float is float32 or float64.
@@ -72,12 +73,12 @@ func Ftoa(f float, fmt byte, prec int) string {
}
func genericFtoa(bits uint64, fmt byte, prec int, flt *floatInfo) string {
- neg := bits>>flt.expbits>>flt.mantbits != 0;
- exp := int(bits>>flt.mantbits) & (1<<flt.expbits - 1);
- mant := bits & (uint64(1)<<flt.mantbits - 1);
+ neg := bits >> flt.expbits >> flt.mantbits != 0;
+ exp := int(bits >> flt.mantbits)&(1 << flt.expbits - 1);
+ mant := bits&(uint64(1) << flt.mantbits - 1);
switch exp {
- case 1<<flt.expbits - 1:
+ case 1 << flt.expbits - 1:
// Inf, NaN
if mant != 0 {
return "NaN";
@@ -93,7 +94,7 @@ func genericFtoa(bits uint64, fmt byte, prec int, flt *floatInfo) string {
default:
// add implicit top bit
- mant |= uint64(1)<<flt.mantbits;
+ mant |= uint64(1) << flt.mantbits;
}
exp += flt.bias;
@@ -106,7 +107,7 @@ func genericFtoa(bits uint64, fmt byte, prec int, flt *floatInfo) string {
// The shift is exp - flt.mantbits because mant is a 1-bit integer
// followed by a flt.mantbits fraction, and we are treating it as
// a 1+flt.mantbits-bit integer.
- d := newDecimal(mant).Shift(exp - int(flt.mantbits));
+ d := newDecimal(mant).Shift(exp-int(flt.mantbits));
// Round appropriately.
// Negative precision means "only as much as needed to be exact."
@@ -127,7 +128,7 @@ func genericFtoa(bits uint64, fmt byte, prec int, flt *floatInfo) string {
case 'e', 'E':
d.Round(prec+1);
case 'f':
- d.Round(d.dp+prec);
+ d.Round(d.dp + prec);
case 'g', 'G':
if prec == 0 {
prec = 1;
@@ -151,16 +152,16 @@ func genericFtoa(bits uint64, fmt byte, prec int, flt *floatInfo) string {
// if precision was the shortest possible, use precision 6 for this decision.
eprec := prec;
if shortest {
- eprec = 6
+ eprec = 6;
}
exp := d.dp - 1;
if exp < -4 || exp >= eprec {
- return fmtE(neg, d, prec - 1, fmt + 'e' - 'g');
+ return fmtE(neg, d, prec-1, fmt+'e'-'g');
}
return fmtF(neg, d, max(prec - d.dp, 0));
}
- return "%" + string(fmt);
+ return "%"+string(fmt);
}
// Round d (= mant * 2^exp) to the shortest number of digits
@@ -185,7 +186,7 @@ func roundShortest(d *decimal, mant uint64, exp int, flt *floatInfo) {
// d = mant << (exp - mantbits)
// Next highest floating point number is mant+1 << exp-mantbits.
// Our upper bound is halfway in between, mant*2+1 << exp-mantbits-1.
- upper := newDecimal(mant*2+1).Shift(exp-int(flt.mantbits)-1);
+ upper := newDecimal(mant*2 + 1).Shift(exp-int(flt.mantbits)-1);
// d = mant << (exp - mantbits)
// Next lowest floating point number is mant-1 << exp-mantbits,
@@ -196,14 +197,14 @@ func roundShortest(d *decimal, mant uint64, exp int, flt *floatInfo) {
minexp := flt.bias + 1; // minimum possible exponent
var mantlo uint64;
var explo int;
- if mant > 1<<flt.mantbits || exp == minexp {
- mantlo = mant - 1;
+ if mant > 1 << flt.mantbits || exp == minexp {
+ mantlo = mant-1;
explo = exp;
} else {
- mantlo = mant*2-1;
+ mantlo = mant*2 - 1;
explo = exp-1;
}
- lower := newDecimal(mantlo*2+1).Shift(explo-int(flt.mantbits)-1);
+ lower := newDecimal(mantlo*2 + 1).Shift(explo-int(flt.mantbits)-1);
// The upper and lower bounds are possible outputs only if
// the original mantissa is even, so that IEEE round-to-even
@@ -252,8 +253,8 @@ func roundShortest(d *decimal, mant uint64, exp int, flt *floatInfo) {
// %e: -d.ddddde±dd
func fmtE(neg bool, d *decimal, prec int, fmt byte) string {
- buf := make([]byte, 3+max(prec, 0)+30); // "-0." + prec digits + exp
- w := 0; // write index
+ buf := make([]byte, 3 + max(prec, 0) + 30); // "-0." + prec digits + exp
+ w := 0; // write index
// sign
if neg {
@@ -322,7 +323,7 @@ func fmtE(neg bool, d *decimal, prec int, fmt byte) string {
// %f: -ddddddd.ddddd
func fmtF(neg bool, d *decimal, prec int) string {
- buf := make([]byte, 1+max(d.dp, 1)+1+max(prec, 0));
+ buf := make([]byte, 1 + max(d.dp, 1) + 1 + max(prec, 0));
w := 0;
// sign
@@ -352,10 +353,10 @@ func fmtF(neg bool, d *decimal, prec int) string {
buf[w] = '.';
w++;
for i := 0; i < prec; i++ {
- if d.dp+i < 0 || d.dp+i >= d.nd {
+ if d.dp + i < 0 || d.dp + i >= d.nd {
buf[w] = '0';
} else {
- buf[w] = d.d[d.dp+i];
+ buf[w] = d.d[d.dp + i];
}
w++;
}
@@ -379,7 +380,7 @@ func fmtB(neg bool, mant uint64, exp int, flt *floatInfo) string {
n++;
w--;
buf[w] = byte(exp%10 + '0');
- exp /= 10
+ exp /= 10;
}
w--;
buf[w] = esign;
@@ -405,4 +406,3 @@ func max(a, b int) int {
}
return b;
}
-
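
The genericFtoa hunk reflows the bit fiddling that splits a float into sign, biased exponent, and mantissa, and re-attaches the implicit leading 1. The same decomposition written out against math.Float64bits, using the usual float64 parameters (matching the float64info literal earlier in the file):

package main

import (
	"fmt"
	"math"
)

const (
	mantbits = 52
	expbits  = 11
	bias     = -1023
)

func main() {
	bits := math.Float64bits(6.25)
	neg := bits>>expbits>>mantbits != 0
	exp := int(bits>>mantbits) & (1<<expbits - 1)
	mant := bits & (uint64(1)<<mantbits - 1)
	if exp != 0 && exp != 1<<expbits-1 { // not denormal, not Inf/NaN
		mant |= uint64(1) << mantbits // add the implicit top bit
		exp += bias
	}
	// value == mant * 2**(exp-mantbits)
	fmt.Println(neg, exp, mant) // false 2 7036874417766400
}
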
diff --git a/src/pkg/template/template_test.go b/src/pkg/template/template_test.go
index 5e73bc9f4..b17afd26e 100644
--- a/src/pkg/template/template_test.go
+++ b/src/pkg/template/template_test.go
@@ -13,32 +13,32 @@ import (
)
type Test struct {
- in, out, err string
+ in, out, err string;
}
type T struct {
- item string;
- value string;
+ item string;
+ value string;
}
type S struct {
- header string;
- integer int;
- raw string;
- innerT T;
- innerPointerT *T;
- data []T;
- pdata []*T;
- empty []*T;
- emptystring string;
- null []*T;
- vec *vector.Vector;
- true bool;
- false bool;
+ header string;
+ integer int;
+ raw string;
+ innerT T;
+ innerPointerT *T;
+ data []T;
+ pdata []*T;
+ empty []*T;
+ emptystring string;
+ null []*T;
+ vec *vector.Vector;
+ true bool;
+ false bool;
}
-var t1 = T{ "ItemNumber1", "ValueNumber1" }
-var t2 = T{ "ItemNumber2", "ValueNumber2" }
+var t1 = T{"ItemNumber1", "ValueNumber1"}
+var t2 = T{"ItemNumber2", "ValueNumber2"}
func uppercase(v interface{}) string {
s := v.(string);
@@ -46,7 +46,7 @@ func uppercase(v interface{}) string {
for i := 0; i < len(s); i++ {
c := s[i];
if 'a' <= c && c <= 'z' {
- c = c + 'A' - 'a'
+ c = c+'A'-'a';
}
t += string(c);
}
@@ -55,36 +55,36 @@ func uppercase(v interface{}) string {
func plus1(v interface{}) string {
i := v.(int);
- return fmt.Sprint(i + 1);
+ return fmt.Sprint(i+1);
}
func writer(f func(interface{}) string) (func(io.Writer, interface{}, string)) {
return func(w io.Writer, v interface{}, format string) {
io.WriteString(w, f(v));
- }
+ };
}
-var formatters = FormatterMap {
- "uppercase" : writer(uppercase),
- "+1" : writer(plus1),
+var formatters = FormatterMap{
+ "uppercase": writer(uppercase),
+ "+1": writer(plus1),
}
-var tests = []*Test {
+var tests = []*Test{
// Simple
- &Test{ "", "", "" },
- &Test{ "abc\ndef\n", "abc\ndef\n", "" },
- &Test{ " {.meta-left} \n", "{", "" },
- &Test{ " {.meta-right} \n", "}", "" },
- &Test{ " {.space} \n", " ", "" },
- &Test{ " {.tab} \n", "\t", "" },
- &Test{ " {#comment} \n", "", "" },
+ &Test{"", "", ""},
+ &Test{"abc\ndef\n", "abc\ndef\n", ""},
+ &Test{" {.meta-left} \n", "{", ""},
+ &Test{" {.meta-right} \n", "}", ""},
+ &Test{" {.space} \n", " ", ""},
+ &Test{" {.tab} \n", "\t", ""},
+ &Test{" {#comment} \n", "", ""},
// Variables at top level
&Test{
in: "{header}={integer}\n",
- out: "Header=77\n"
+ out: "Header=77\n",
},
// Section
@@ -93,21 +93,21 @@ var tests = []*Test {
"some text for the section\n"
"{.end}\n",
- out: "some text for the section\n"
+ out: "some text for the section\n",
},
&Test{
in: "{.section data }\n"
"{header}={integer}\n"
"{.end}\n",
- out: "Header=77\n"
+ out: "Header=77\n",
},
&Test{
in: "{.section pdata }\n"
"{header}={integer}\n"
"{.end}\n",
- out: "Header=77\n"
+ out: "Header=77\n",
},
&Test{
in: "{.section pdata }\n"
@@ -116,7 +116,7 @@ var tests = []*Test {
"data not present\n"
"{.end}\n",
- out: "data present\n"
+ out: "data present\n",
},
&Test{
in: "{.section empty }\n"
@@ -125,7 +125,7 @@ var tests = []*Test {
"data not present\n"
"{.end}\n",
- out: "data not present\n"
+ out: "data not present\n",
},
&Test{
in: "{.section null }\n"
@@ -134,7 +134,7 @@ var tests = []*Test {
"data not present\n"
"{.end}\n",
- out: "data not present\n"
+ out: "data not present\n",
},
&Test{
in: "{.section pdata }\n"
@@ -145,12 +145,12 @@ var tests = []*Test {
"{.end}\n",
out: "Header=77\n"
- "Header=77\n"
+ "Header=77\n",
},
&Test{
in: "{.section data}{.end} {header}\n",
- out: " Header\n"
+ out: " Header\n",
},
// Repeated
@@ -162,7 +162,7 @@ var tests = []*Test {
"{.end}\n",
out: "ItemNumber1=ValueNumber1\n"
- "ItemNumber2=ValueNumber2\n"
+ "ItemNumber2=ValueNumber2\n",
},
&Test{
in: "{.section pdata }\n"
@@ -174,7 +174,7 @@ var tests = []*Test {
"{.end}\n",
out: "ItemNumber1=ValueNumber1\n"
- "ItemNumber2=ValueNumber2\n"
+ "ItemNumber2=ValueNumber2\n",
},
&Test{
in: "{.section @ }\n"
@@ -185,7 +185,7 @@ var tests = []*Test {
"{.end}\n"
"{.end}\n",
- out: "this should appear: empty field\n"
+ out: "this should appear: empty field\n",
},
&Test{
in: "{.repeated section pdata }\n"
@@ -196,7 +196,7 @@ var tests = []*Test {
out: "ItemNumber1\n"
"is\nover\nmultiple\nlines\n"
- "ItemNumber2\n"
+ "ItemNumber2\n",
},
&Test{
in: "{.section pdata }\n"
@@ -210,7 +210,7 @@ var tests = []*Test {
out: "ItemNumber1=ValueNumber1\n"
"DIVIDER\n"
- "ItemNumber2=ValueNumber2\n"
+ "ItemNumber2=ValueNumber2\n",
},
&Test{
in: "{.repeated section vec }\n"
@@ -218,7 +218,7 @@ var tests = []*Test {
"{.end}\n",
out: "elt1\n"
- "elt2\n"
+ "elt2\n",
},
&Test{
in: "{.repeated section integer}{.end}",
@@ -232,14 +232,14 @@ var tests = []*Test {
"{innerT.item}={innerT.value}\n"
"{.end}",
- out: "ItemNumber1=ValueNumber1\n"
+ out: "ItemNumber1=ValueNumber1\n",
},
&Test{
in: "{.section @ }\n"
"{innerT.item}={.section innerT}{.section value}{@}{.end}{.end}\n"
"{.end}",
- out: "ItemNumber1=ValueNumber1\n"
+ out: "ItemNumber1=ValueNumber1\n",
},
@@ -251,7 +251,7 @@ var tests = []*Test {
"{.end}\n",
out: "HEADER=78\n"
- "Header=77\n"
+ "Header=77\n",
},
&Test{
@@ -259,21 +259,21 @@ var tests = []*Test {
"{raw|html}\n",
out: "&<>!@ #$%^\n"
- "&amp;&lt;&gt;!@ #$%^\n"
+ "&amp;&lt;&gt;!@ #$%^\n",
},
&Test{
in: "{.section emptystring}emptystring{.end}\n"
"{.section header}header{.end}\n",
- out: "\nheader\n"
+ out: "\nheader\n",
},
- &Test {
+ &Test{
in: "{.section true}1{.or}2{.end}\n"
"{.section false}3{.or}4{.end}\n",
- out: "1\n4\n"
+ out: "1\n4\n",
},
}
@@ -284,9 +284,9 @@ func TestAll(t *testing.T) {
s.integer = 77;
s.raw = "&<>!@ #$%^";
s.innerT = t1;
- s.data = []T{ t1, t2 };
- s.pdata = []*T{ &t1, &t2 };
- s.empty = []*T{ };
+ s.data = []T{t1, t2};
+ s.pdata = []*T{&t1, &t2};
+ s.empty = []*T{};
s.null = nil;
s.vec = vector.New(0);
s.vec.Push("elt1");
@@ -321,28 +321,28 @@ func TestAll(t *testing.T) {
func TestStringDriverType(t *testing.T) {
tmpl, err := Parse("template: {@}", nil);
if err != nil {
- t.Error("unexpected parse error:", err)
+ t.Error("unexpected parse error:", err);
}
var b bytes.Buffer;
err = tmpl.Execute("hello", &b);
if err != nil {
- t.Error("unexpected execute error:", err)
+ t.Error("unexpected execute error:", err);
}
s := b.String();
if s != "template: hello" {
- t.Errorf("failed passing string as data: expected %q got %q", "template: hello", s)
+ t.Errorf("failed passing string as data: expected %q got %q", "template: hello", s);
}
}
func TestTwice(t *testing.T) {
tmpl, err := Parse("template: {@}", nil);
if err != nil {
- t.Error("unexpected parse error:", err)
+ t.Error("unexpected parse error:", err);
}
var b bytes.Buffer;
err = tmpl.Execute("hello", &b);
if err != nil {
- t.Error("unexpected parse error:", err)
+ t.Error("unexpected parse error:", err);
}
s := b.String();
text := "template: hello";
@@ -351,7 +351,7 @@ func TestTwice(t *testing.T) {
}
err = tmpl.Execute("hello", &b);
if err != nil {
- t.Error("unexpected parse error:", err)
+ t.Error("unexpected parse error:", err);
}
s = b.String();
text += text;
@@ -377,9 +377,9 @@ func TestCustomDelims(t *testing.T) {
err := tmpl.Parse(text);
if err != nil {
if i == 0 || j == 0 { // expected
- continue
+ continue;
}
- t.Error("unexpected parse error:", err)
+ t.Error("unexpected parse error:", err);
} else if i == 0 || j == 0 {
t.Errorf("expected parse error for empty delimiter: %d %d %q %q", i, j, ldelim, rdelim);
continue;
@@ -388,7 +388,7 @@ func TestCustomDelims(t *testing.T) {
err = tmpl.Execute("hello", &b);
s := b.String();
if s != "template: hello" + ldelim + rdelim {
- t.Errorf("failed delim check(%q %q) %q got %q", ldelim, rdelim, text, s)
+ t.Errorf("failed delim check(%q %q) %q got %q", ldelim, rdelim, text, s);
}
}
}
@@ -408,7 +408,7 @@ func TestVarIndirection(t *testing.T) {
}
err = tmpl.Execute(s, &buf);
if err != nil {
- t.Fatal("unexpected execute error:", err)
+ t.Fatal("unexpected execute error:", err);
}
expect := fmt.Sprintf("%v", &t1); // output should be hex address of t1
if buf.String() != expect {
diff --git a/src/pkg/testing/iotest/reader.go b/src/pkg/testing/iotest/reader.go
index 3a560966a..823124aa3 100644
--- a/src/pkg/testing/iotest/reader.go
+++ b/src/pkg/testing/iotest/reader.go
@@ -70,7 +70,7 @@ func (r *dataErrReader) Read(p []byte) (n int, err os.Error) {
break;
}
n = bytes.Copy(p, r.unread);
- r.unread = r.unread[n : len(r.unread)];
+ r.unread = r.unread[n:len(r.unread)];
}
return;
}
diff --git a/src/pkg/testing/regexp.go b/src/pkg/testing/regexp.go
index e5b5eac4f..7e6539e9e 100644
--- a/src/pkg/testing/regexp.go
+++ b/src/pkg/testing/regexp.go
@@ -29,28 +29,28 @@ import (
"utf8";
)
-var debug = false;
+var debug = false
// Error codes returned by failures to parse an expression.
var (
- ErrInternal = "internal error";
- ErrUnmatchedLpar = "unmatched ''";
- ErrUnmatchedRpar = "unmatched ''";
- ErrUnmatchedLbkt = "unmatched '['";
- ErrUnmatchedRbkt = "unmatched ']'";
- ErrBadRange = "bad range in character class";
- ErrExtraneousBackslash = "extraneous backslash";
- ErrBadClosure = "repeated closure **, ++, etc.";
- ErrBareClosure = "closure applies to nothing";
- ErrBadBackslash = "illegal backslash escape";
+ ErrInternal = "internal error";
+ ErrUnmatchedLpar = "unmatched ''";
+ ErrUnmatchedRpar = "unmatched ''";
+ ErrUnmatchedLbkt = "unmatched '['";
+ ErrUnmatchedRbkt = "unmatched ']'";
+ ErrBadRange = "bad range in character class";
+ ErrExtraneousBackslash = "extraneous backslash";
+ ErrBadClosure = "repeated closure **, ++, etc.";
+ ErrBareClosure = "closure applies to nothing";
+ ErrBadBackslash = "illegal backslash escape";
)
// An instruction executed by the NFA
type instr interface {
- kind() int; // the type of this instruction: _CHAR, _ANY, etc.
- next() instr; // the instruction to execute after this one
+ kind() int; // the type of this instruction: _CHAR, _ANY, etc.
+ next() instr; // the instruction to execute after this one
setNext(i instr);
- index() int;
+ index() int;
setIndex(i int);
print();
}
@@ -61,69 +61,93 @@ type common struct {
_index int;
}
-func (c *common) next() instr { return c._next }
-func (c *common) setNext(i instr) { c._next = i }
-func (c *common) index() int { return c._index }
-func (c *common) setIndex(i int) { c._index = i }
+func (c *common) next() instr {
+ return c._next;
+}
+func (c *common) setNext(i instr) {
+ c._next = i;
+}
+func (c *common) index() int {
+ return c._index;
+}
+func (c *common) setIndex(i int) {
+ c._index = i;
+}
// The representation of a compiled regular expression.
// The public interface is entirely through methods.
type Regexp struct {
- expr string; // the original expression
+ expr string; // the original expression
ch chan<- *Regexp; // reply channel when we're done
- error string; // compile- or run-time error; nil if OK
+ error string; // compile- or run-time error; nil if OK
inst []instr;
start instr;
nbra int; // number of brackets in expression, for subexpressions
}
const (
- _START // beginning of program
- = iota;
+ _START = // beginning of program
+ iota;
_END; // end of program: success
_BOT; // '^' beginning of text
_EOT; // '$' end of text
- _CHAR; // 'a' regular character
+ _CHAR; // 'a' regular character
_CHARCLASS; // [a-z] character class
_ANY; // '.' any character including newline
_NOTNL; // [^\n] special case: any character but newline
_BRA; // '(' parenthesized expression
- _EBRA; // ')'; end of '(' parenthesized expression
+ _EBRA; // ')'; end of '(' parenthesized expression
_ALT; // '|' alternation
_NOP; // do nothing; makes it easy to link without patching
)
// --- START start of program
type _Start struct {
- common
+ common;
}
-func (start *_Start) kind() int { return _START }
-func (start *_Start) print() { print("start") }
+func (start *_Start) kind() int {
+ return _START;
+}
+func (start *_Start) print() {
+ print("start");
+}
// --- END end of program
type _End struct {
- common
+ common;
}
-func (end *_End) kind() int { return _END }
-func (end *_End) print() { print("end") }
+func (end *_End) kind() int {
+ return _END;
+}
+func (end *_End) print() {
+ print("end");
+}
// --- BOT beginning of text
type _Bot struct {
- common
+ common;
}
-func (bot *_Bot) kind() int { return _BOT }
-func (bot *_Bot) print() { print("bot") }
+func (bot *_Bot) kind() int {
+ return _BOT;
+}
+func (bot *_Bot) print() {
+ print("bot");
+}
// --- EOT end of text
type _Eot struct {
- common
+ common;
}
-func (eot *_Eot) kind() int { return _EOT }
-func (eot *_Eot) print() { print("eot") }
+func (eot *_Eot) kind() int {
+ return _EOT;
+}
+func (eot *_Eot) print() {
+ print("eot");
+}
// --- CHAR a regular character
type _Char struct {
@@ -131,8 +155,12 @@ type _Char struct {
char int;
}
-func (char *_Char) kind() int { return _CHAR }
-func (char *_Char) print() { print("char ", string(char.char)) }
+func (char *_Char) kind() int {
+ return _CHAR;
+}
+func (char *_Char) print() {
+ print("char ", string(char.char));
+}
func newChar(char int) *_Char {
c := new(_Char);
@@ -150,7 +178,9 @@ type _CharClass struct {
ranges []int;
}
-func (cclass *_CharClass) kind() int { return _CHARCLASS }
+func (cclass *_CharClass) kind() int {
+ return _CHARCLASS;
+}
func (cclass *_CharClass) print() {
print("charclass");
@@ -174,11 +204,11 @@ func (cclass *_CharClass) addRange(a, b int) {
if n >= cap(cclass.ranges) {
nr := make([]int, n, 2*n);
for i, j := range nr {
- nr[i] = j
+ nr[i] = j;
}
cclass.ranges = nr;
}
- cclass.ranges = cclass.ranges[0:n+2];
+ cclass.ranges = cclass.ranges[0 : n+2];
cclass.ranges[n] = a;
n++;
cclass.ranges[n] = b;
@@ -190,10 +220,10 @@ func (cclass *_CharClass) matches(c int) bool {
min := cclass.ranges[i];
max := cclass.ranges[i+1];
if min <= c && c <= max {
- return !cclass.negate
+ return !cclass.negate;
}
}
- return cclass.negate
+ return cclass.negate;
}
func newCharClass() *_CharClass {
@@ -204,19 +234,27 @@ func newCharClass() *_CharClass {
// --- ANY any character
type _Any struct {
- common
+ common;
}
-func (any *_Any) kind() int { return _ANY }
-func (any *_Any) print() { print("any") }
+func (any *_Any) kind() int {
+ return _ANY;
+}
+func (any *_Any) print() {
+ print("any");
+}
// --- NOTNL any character but newline
type _NotNl struct {
- common
+ common;
}
-func (notnl *_NotNl) kind() int { return _NOTNL }
-func (notnl *_NotNl) print() { print("notnl") }
+func (notnl *_NotNl) kind() int {
+ return _NOTNL;
+}
+func (notnl *_NotNl) print() {
+ print("notnl");
+}
// --- BRA parenthesized expression
type _Bra struct {
@@ -224,8 +262,12 @@ type _Bra struct {
n int; // subexpression number
}
-func (bra *_Bra) kind() int { return _BRA }
-func (bra *_Bra) print() { print("bra", bra.n); }
+func (bra *_Bra) kind() int {
+ return _BRA;
+}
+func (bra *_Bra) print() {
+ print("bra", bra.n);
+}
// --- EBRA end of parenthesized expression
type _Ebra struct {
@@ -233,8 +275,12 @@ type _Ebra struct {
n int; // subexpression number
}
-func (ebra *_Ebra) kind() int { return _EBRA }
-func (ebra *_Ebra) print() { print("ebra ", ebra.n); }
+func (ebra *_Ebra) kind() int {
+ return _EBRA;
+}
+func (ebra *_Ebra) print() {
+ print("ebra ", ebra.n);
+}
// --- ALT alternation
type _Alt struct {
@@ -242,16 +288,24 @@ type _Alt struct {
left instr; // other branch
}
-func (alt *_Alt) kind() int { return _ALT }
-func (alt *_Alt) print() { print("alt(", alt.left.index(), ")"); }
+func (alt *_Alt) kind() int {
+ return _ALT;
+}
+func (alt *_Alt) print() {
+ print("alt(", alt.left.index(), ")");
+}
// --- NOP no operation
type _Nop struct {
- common
+ common;
}
-func (nop *_Nop) kind() int { return _NOP }
-func (nop *_Nop) print() { print("nop") }
+func (nop *_Nop) kind() int {
+ return _NOP;
+}
+func (nop *_Nop) print() {
+ print("nop");
+}
// report error and exit compiling/executing goroutine
func (re *Regexp) setError(err string) {
@@ -266,11 +320,11 @@ func (re *Regexp) add(i instr) instr {
if n >= cap(re.inst) {
ni := make([]instr, n, 2*n);
for i, j := range re.inst {
- ni[i] = j
+ ni[i] = j;
}
re.inst = ni;
}
- re.inst = re.inst[0:n+1];
+ re.inst = re.inst[0 : n+1];
re.inst[n] = i;
return i;
}
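
Both this hunk and the addRange hunk above reflow the same hand-rolled grow-and-append idiom, written before the built-in append existed: when length reaches capacity, allocate a larger slice, copy, then reslice one (or two) elements longer. A compact sketch of the idiom in current Go; it grows from 2*(n+1) so that it also works starting from a nil slice, which the originals do not need to handle.

package main

import "fmt"

// push appends x the pre-append way: grow by doubling, copy, reslice, store.
func push(s []int, x int) []int {
	n := len(s)
	if n >= cap(s) {
		ns := make([]int, n, 2*(n+1))
		copy(ns, s)
		s = ns
	}
	s = s[0 : n+1]
	s[n] = x
	return s
}

func main() {
	var s []int
	for i := 0; i < 5; i++ {
		s = push(s, i)
	}
	fmt.Println(s, len(s), cap(s)) // [0 1 2 3 4] 5 6
}
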
@@ -290,9 +344,9 @@ func (p *parser) c() int {
func (p *parser) nextc() int {
if p.pos >= len(p.re.expr) {
- p.ch = endOfFile
+ p.ch = endOfFile;
} else {
- c, w := utf8.DecodeRuneInString(p.re.expr[p.pos:len(p.re.expr)]);
+ c, w := utf8.DecodeRuneInString(p.re.expr[p.pos : len(p.re.expr)]);
p.ch = c;
p.pos += w;
}
@@ -312,20 +366,20 @@ func special(c int) bool {
s := `\.+*?()|[]^$`;
for i := 0; i < len(s); i++ {
if c == int(s[i]) {
- return true
+ return true;
}
}
- return false
+ return false;
}
func specialcclass(c int) bool {
s := `\-[]`;
for i := 0; i < len(s); i++ {
if c == int(s[i]) {
- return true
+ return true;
}
}
- return false
+ return false;
}
func (p *parser) charClass() instr {
@@ -360,7 +414,7 @@ func (p *parser) charClass() instr {
case c == 'n':
c = '\n';
case specialcclass(c):
- // c is as delivered
+ // c is as delivered
default:
p.re.setError(ErrBadBackslash);
}
@@ -383,7 +437,7 @@ func (p *parser) charClass() instr {
}
}
}
- return iNULL
+ return iNULL;
}
func (p *parser) term() (start, end instr) {
@@ -438,9 +492,9 @@ func (p *parser) term() (start, end instr) {
ebra.n = nbra;
if start == iNULL {
if end == iNULL {
- p.re.setError(ErrInternal)
+ p.re.setError(ErrInternal);
}
- start = ebra
+ start = ebra;
} else {
end.setNext(ebra);
}
@@ -454,7 +508,7 @@ func (p *parser) term() (start, end instr) {
case c == 'n':
c = '\n';
case special(c):
- // c is as delivered
+ // c is as delivered
default:
p.re.setError(ErrBadBackslash);
}
@@ -463,7 +517,7 @@ func (p *parser) term() (start, end instr) {
p.nextc();
start = newChar(c);
p.re.add(start);
- return start, start
+ return start, start;
}
panic("unreachable");
}
@@ -471,7 +525,7 @@ func (p *parser) term() (start, end instr) {
func (p *parser) closure() (start, end instr) {
start, end = p.term();
if start == iNULL {
- return
+ return;
}
switch p.c() {
case '*':
@@ -480,7 +534,7 @@ func (p *parser) closure() (start, end instr) {
p.re.add(alt);
end.setNext(alt); // after end, do alt
alt.left = start; // alternate branch: return to start
- start = alt; // alt becomes new (start, end)
+ start = alt; // alt becomes new (start, end)
end = alt;
case '+':
// (start,end)+:
@@ -488,7 +542,7 @@ func (p *parser) closure() (start, end instr) {
p.re.add(alt);
end.setNext(alt); // after end, do alt
alt.left = start; // alternate branch: return to start
- end = alt; // start is unchanged; end is alt
+ end = alt; // start is unchanged; end is alt
case '?':
// (start,end)?:
alt := new(_Alt);
@@ -498,16 +552,16 @@ func (p *parser) closure() (start, end instr) {
alt.left = start; // alternate branch is start
alt.setNext(nop); // follow on to nop
end.setNext(nop); // after end, go to nop
- start = alt; // start is now alt
- end = nop; // end is nop pointed to by both branches
+ start = alt; // start is now alt
+ end = nop; // end is nop pointed to by both branches
default:
- return
+ return;
}
switch p.nextc() {
case '*', '+', '?':
p.re.setError(ErrBadClosure);
}
- return
+ return;
}
func (p *parser) concatenation() (start, end instr) {
@@ -556,16 +610,16 @@ func (p *parser) regexp() (start, end instr) {
func unNop(i instr) instr {
for i.kind() == _NOP {
- i = i.next()
+ i = i.next();
}
- return i
+ return i;
}
func (re *Regexp) eliminateNops() {
for i := 0; i < len(re.inst); i++ {
inst := re.inst[i];
if inst.kind() == _END {
- continue
+ continue;
}
inst.setNext(unNop(inst.next()));
if inst.kind() == _ALT {
@@ -581,7 +635,7 @@ func (re *Regexp) dump() {
print(inst.index(), ": ");
inst.print();
if inst.kind() != _END {
- print(" -> ", inst.next().index())
+ print(" -> ", inst.next().index());
}
print("\n");
}
@@ -626,7 +680,7 @@ func CompileRegexp(str string) (regexp *Regexp, error string) {
ch := make(chan *Regexp);
go compiler(str, ch);
re := <-ch;
- return re, re.error
+ return re, re.error;
}
type state struct {
@@ -643,10 +697,10 @@ func addState(s []state, inst instr, match []int) []state {
// TODO: Once the state is a vector and we can do insert, inputs will always
// arrive in order and this "earlier" test will never be necessary.
for i := 0; i < l; i++ {
- if s[i].inst.index() == index && // same instruction
- s[i].match[0] < pos { // earlier match already going; lefmost wins
- return s
- }
+ if s[i].inst.index() == index && // same instruction
+ s[i].match[0] < pos { // earlier match already going; leftmost wins
+ return s;
+ }
}
if l == cap(s) {
s1 := make([]state, 2*l)[0:l];
@@ -655,7 +709,7 @@ func addState(s []state, inst instr, match []int) []state {
}
s = s1;
}
- s = s[0:l+1];
+ s = s[0 : l+1];
s[l].inst = inst;
s[l].match = match;
return s;
@@ -672,16 +726,16 @@ func (re *Regexp) doExecute(str string, bytes []byte, pos int) []int {
found := false;
end := len(str);
if bytes != nil {
- end = len(bytes)
+ end = len(bytes);
}
for pos <= end {
if !found {
// prime the pump if we haven't seen a match yet
- match := make([]int, 2*(re.nbra+1));
+ match := make([]int, 2*(re.nbra + 1));
for i := 0; i < len(match); i++ {
match[i] = -1; // no match seen; catches cases like "a(b)?c" on "ac"
}
- match[0] = pos;
+ match[0] = pos;
s[out] = addState(s[out], re.start.next(), match);
}
in, out = out, in; // old out state is new in state
@@ -704,27 +758,27 @@ func (re *Regexp) doExecute(str string, bytes []byte, pos int) []int {
switch s[in][i].inst.kind() {
case _BOT:
if pos == 0 {
- s[in] = addState(s[in], st.inst.next(), st.match)
+ s[in] = addState(s[in], st.inst.next(), st.match);
}
case _EOT:
if pos == end {
- s[in] = addState(s[in], st.inst.next(), st.match)
+ s[in] = addState(s[in], st.inst.next(), st.match);
}
case _CHAR:
if c == st.inst.(*_Char).char {
- s[out] = addState(s[out], st.inst.next(), st.match)
+ s[out] = addState(s[out], st.inst.next(), st.match);
}
case _CHARCLASS:
if st.inst.(*_CharClass).matches(c) {
- s[out] = addState(s[out], st.inst.next(), st.match)
+ s[out] = addState(s[out], st.inst.next(), st.match);
}
case _ANY:
if c != endOfFile {
- s[out] = addState(s[out], st.inst.next(), st.match)
+ s[out] = addState(s[out], st.inst.next(), st.match);
}
case _NOTNL:
if c != endOfFile && c != '\n' {
- s[out] = addState(s[out], st.inst.next(), st.match)
+ s[out] = addState(s[out], st.inst.next(), st.match);
}
case _BRA:
n := st.inst.(*_Bra).n;
@@ -732,21 +786,21 @@ func (re *Regexp) doExecute(str string, bytes []byte, pos int) []int {
s[in] = addState(s[in], st.inst.next(), st.match);
case _EBRA:
n := st.inst.(*_Ebra).n;
- st.match[2*n+1] = pos;
+ st.match[2*n + 1] = pos;
s[in] = addState(s[in], st.inst.next(), st.match);
case _ALT:
s[in] = addState(s[in], st.inst.(*_Alt).left, st.match);
// give other branch a copy of this match vector
- s1 := make([]int, 2*(re.nbra+1));
+ s1 := make([]int, 2*(re.nbra + 1));
for i := 0; i < len(s1); i++ {
- s1[i] = st.match[i]
+ s1[i] = st.match[i];
}
s[in] = addState(s[in], st.inst.next(), s1);
case _END:
// choose leftmost longest
if !found || // first
- st.match[0] < final.match[0] || // leftmost
- (st.match[0] == final.match[0] && pos > final.match[1]) { // longest
+ st.match[0] < final.match[0] || // leftmost
+ (st.match[0] == final.match[0] && pos > final.match[1]) { // longest
final = st;
final.match[1] = pos;
}
@@ -770,7 +824,7 @@ func (re *Regexp) doExecute(str string, bytes []byte, pos int) []int {
// A negative value means the subexpression did not match any element of the string.
// An empty array means "no match".
func (re *Regexp) ExecuteString(s string) (a []int) {
- return re.doExecute(s, nil, 0)
+ return re.doExecute(s, nil, 0);
}
@@ -782,21 +836,21 @@ func (re *Regexp) ExecuteString(s string) (a []int) {
// A negative value means the subexpression did not match any element of the slice.
// An empty array means "no match".
func (re *Regexp) Execute(b []byte) (a []int) {
- return re.doExecute("", b, 0)
+ return re.doExecute("", b, 0);
}
// MatchString returns whether the Regexp matches the string s.
// The return value is a boolean: true for match, false for no match.
func (re *Regexp) MatchString(s string) bool {
- return len(re.doExecute(s, nil, 0)) > 0
+ return len(re.doExecute(s, nil, 0)) > 0;
}
// Match returns whether the Regexp matches the byte slice b.
// The return value is a boolean: true for match, false for no match.
func (re *Regexp) Match(b []byte) bool {
- return len(re.doExecute("", b, 0)) > 0
+ return len(re.doExecute("", b, 0)) > 0;
}
@@ -808,15 +862,15 @@ func (re *Regexp) Match(b []byte) bool {
func (re *Regexp) MatchStrings(s string) (a []string) {
r := re.doExecute(s, nil, 0);
if r == nil {
- return nil
+ return nil;
}
a = make([]string, len(r)/2);
for i := 0; i < len(r); i += 2 {
if r[i] != -1 { // -1 means no match for this subexpression
- a[i/2] = s[r[i] : r[i+1]]
+ a[i/2] = s[r[i]:r[i+1]];
}
}
- return
+ return;
}
// MatchSlices matches the Regexp against the byte slice b.
@@ -827,15 +881,15 @@ func (re *Regexp) MatchStrings(s string) (a []string) {
func (re *Regexp) MatchSlices(b []byte) (a [][]byte) {
r := re.doExecute("", b, 0);
if r == nil {
- return nil
+ return nil;
}
a = make([][]byte, len(r)/2);
for i := 0; i < len(r); i += 2 {
if r[i] != -1 { // -1 means no match for this subexpression
- a[i/2] = b[r[i] : r[i+1]]
+ a[i/2] = b[r[i]:r[i+1]];
}
}
- return
+ return;
}
// MatchString checks whether a textual regular expression
@@ -844,9 +898,9 @@ func (re *Regexp) MatchSlices(b []byte) (a [][]byte) {
func MatchString(pattern string, s string) (matched bool, error string) {
re, err := CompileRegexp(pattern);
if err != "" {
- return false, err
+ return false, err;
}
- return re.MatchString(s), ""
+ return re.MatchString(s), "";
}
// Match checks whether a textual regular expression
@@ -855,7 +909,7 @@ func MatchString(pattern string, s string) (matched bool, error string) {
func Match(pattern string, b []byte) (matched bool, error string) {
re, err := CompileRegexp(pattern);
if err != "" {
- return false, err
+ return false, err;
}
- return re.Match(b), ""
+ return re.Match(b), "";
}
diff --git a/src/pkg/testing/regexp_test.go b/src/pkg/testing/regexp_test.go
index 367a61d8c..d24e801b9 100644
--- a/src/pkg/testing/regexp_test.go
+++ b/src/pkg/testing/regexp_test.go
@@ -33,22 +33,23 @@ type stringError struct {
re string;
err string;
}
+
var bad_re = []stringError{
- stringError{ `*`, ErrBareClosure },
- stringError{ `(abc`, ErrUnmatchedLpar },
- stringError{ `abc)`, ErrUnmatchedRpar },
- stringError{ `x[a-z`, ErrUnmatchedLbkt },
- stringError{ `abc]`, ErrUnmatchedRbkt },
- stringError{ `[z-a]`, ErrBadRange },
- stringError{ `abc\`, ErrExtraneousBackslash },
- stringError{ `a**`, ErrBadClosure },
- stringError{ `a*+`, ErrBadClosure },
- stringError{ `a??`, ErrBadClosure },
- stringError{ `*`, ErrBareClosure },
- stringError{ `\x`, ErrBadBackslash },
+ stringError{`*`, ErrBareClosure},
+ stringError{`(abc`, ErrUnmatchedLpar},
+ stringError{`abc)`, ErrUnmatchedRpar},
+ stringError{`x[a-z`, ErrUnmatchedLbkt},
+ stringError{`abc]`, ErrUnmatchedRbkt},
+ stringError{`[z-a]`, ErrBadRange},
+ stringError{`abc\`, ErrExtraneousBackslash},
+ stringError{`a**`, ErrBadClosure},
+ stringError{`a*+`, ErrBadClosure},
+ stringError{`a??`, ErrBadClosure},
+ stringError{`*`, ErrBareClosure},
+ stringError{`\x`, ErrBadBackslash},
}
-type vec []int;
+type vec []int
type tester struct {
re string;
@@ -56,33 +57,33 @@ type tester struct {
match vec;
}
-var matches = []tester {
- tester{ ``, "", vec{0,0} },
- tester{ `a`, "a", vec{0,1} },
- tester{ `x`, "y", vec{} },
- tester{ `b`, "abc", vec{1,2} },
- tester{ `.`, "a", vec{0,1} },
- tester{ `.*`, "abcdef", vec{0,6} },
- tester{ `^abcd$`, "abcd", vec{0,4} },
- tester{ `^bcd'`, "abcdef", vec{} },
- tester{ `^abcd$`, "abcde", vec{} },
- tester{ `a+`, "baaab", vec{1,4} },
- tester{ `a*`, "baaab", vec{0,0} },
- tester{ `[a-z]+`, "abcd", vec{0,4} },
- tester{ `[^a-z]+`, "ab1234cd", vec{2,6} },
- tester{ `[a\-\]z]+`, "az]-bcz", vec{0,4} },
- tester{ `[^\n]+`, "abcd\n", vec{0,4} },
- tester{ `[日本語]+`, "日本語日本語", vec{0,18} },
- tester{ `()`, "", vec{0,0, 0,0} },
- tester{ `(a)`, "a", vec{0,1, 0,1} },
- tester{ `(.)(.)`, "日a", vec{0,4, 0,3, 3,4} },
- tester{ `(.*)`, "", vec{0,0, 0,0} },
- tester{ `(.*)`, "abcd", vec{0,4, 0,4} },
- tester{ `(..)(..)`, "abcd", vec{0,4, 0,2, 2,4} },
- tester{ `(([^xyz]*)(d))`, "abcd", vec{0,4, 0,4, 0,3, 3,4} },
- tester{ `((a|b|c)*(d))`, "abcd", vec{0,4, 0,4, 2,3, 3,4} },
- tester{ `(((a|b|c)*)(d))`, "abcd", vec{0,4, 0,4, 0,3, 2,3, 3,4} },
- tester{ `a*(|(b))c*`, "aacc", vec{0,4, 2,2, -1,-1} },
+var matches = []tester{
+ tester{``, "", vec{0, 0}},
+ tester{`a`, "a", vec{0, 1}},
+ tester{`x`, "y", vec{}},
+ tester{`b`, "abc", vec{1, 2}},
+ tester{`.`, "a", vec{0, 1}},
+ tester{`.*`, "abcdef", vec{0, 6}},
+ tester{`^abcd$`, "abcd", vec{0, 4}},
+ tester{`^bcd'`, "abcdef", vec{}},
+ tester{`^abcd$`, "abcde", vec{}},
+ tester{`a+`, "baaab", vec{1, 4}},
+ tester{`a*`, "baaab", vec{0, 0}},
+ tester{`[a-z]+`, "abcd", vec{0, 4}},
+ tester{`[^a-z]+`, "ab1234cd", vec{2, 6}},
+ tester{`[a\-\]z]+`, "az]-bcz", vec{0, 4}},
+ tester{`[^\n]+`, "abcd\n", vec{0, 4}},
+ tester{`[日本語]+`, "日本語日本語", vec{0, 18}},
+ tester{`()`, "", vec{0, 0, 0, 0}},
+ tester{`(a)`, "a", vec{0, 1, 0, 1}},
+ tester{`(.)(.)`, "日a", vec{0, 4, 0, 3, 3, 4}},
+ tester{`(.*)`, "", vec{0, 0, 0, 0}},
+ tester{`(.*)`, "abcd", vec{0, 4, 0, 4}},
+ tester{`(..)(..)`, "abcd", vec{0, 4, 0, 2, 2, 4}},
+ tester{`(([^xyz]*)(d))`, "abcd", vec{0, 4, 0, 4, 0, 3, 3, 4}},
+ tester{`((a|b|c)*(d))`, "abcd", vec{0, 4, 0, 4, 2, 3, 3, 4}},
+ tester{`(((a|b|c)*)(d))`, "abcd", vec{0, 4, 0, 4, 0, 3, 2, 3, 3, 4}},
+ tester{`a*(|(b))c*`, "aacc", vec{0, 4, 2, 2, -1, -1}},
}
func compileTest(t *T, expr string, error string) *Regexp {
@@ -90,7 +91,7 @@ func compileTest(t *T, expr string, error string) *Regexp {
if err != error {
t.Error("compiling `", expr, "`; unexpected error: ", err);
}
- return re
+ return re;
}
func printVec(t *T, m []int) {
@@ -99,7 +100,7 @@ func printVec(t *T, m []int) {
t.Log("\t<no match>");
} else {
for i := 0; i < l; i = i+2 {
- t.Log("\t", m[i], ",", m[i+1])
+ t.Log("\t", m[i], ",", m[i+1]);
}
}
}
@@ -110,7 +111,7 @@ func printStrings(t *T, m []string) {
t.Log("\t<no match>");
} else {
for i := 0; i < l; i = i+2 {
- t.Logf("\t%q", m[i])
+ t.Logf("\t%q", m[i]);
}
}
}
@@ -121,7 +122,7 @@ func printBytes(t *T, b [][]byte) {
t.Log("\t<no match>");
} else {
for i := 0; i < l; i = i+2 {
- t.Logf("\t%q", b[i])
+ t.Logf("\t%q", b[i]);
}
}
}
@@ -129,46 +130,46 @@ func printBytes(t *T, b [][]byte) {
func equal(m1, m2 []int) bool {
l := len(m1);
if l != len(m2) {
- return false
+ return false;
}
for i := 0; i < l; i++ {
if m1[i] != m2[i] {
- return false
+ return false;
}
}
- return true
+ return true;
}
func equalStrings(m1, m2 []string) bool {
l := len(m1);
if l != len(m2) {
- return false
+ return false;
}
for i := 0; i < l; i++ {
if m1[i] != m2[i] {
- return false
+ return false;
}
}
- return true
+ return true;
}
func equalBytes(m1 [][]byte, m2 []string) bool {
l := len(m1);
if l != len(m2) {
- return false
+ return false;
}
for i := 0; i < l; i++ {
if string(m1[i]) != m2[i] {
- return false
+ return false;
}
}
- return true
+ return true;
}
func executeTest(t *T, expr string, str string, match []int) {
re := compileTest(t, expr, "");
if re == nil {
- return
+ return;
}
m := re.ExecuteString(str);
if !equal(m, match) {
@@ -195,21 +196,21 @@ func TestGoodCompile(t *T) {
func TestBadCompile(t *T) {
for i := 0; i < len(bad_re); i++ {
- compileTest(t, bad_re[i].re, bad_re[i].err)
+ compileTest(t, bad_re[i].re, bad_re[i].err);
}
}
func TestExecute(t *T) {
for i := 0; i < len(matches); i++ {
test := &matches[i];
- executeTest(t, test.re, test.text, test.match)
+ executeTest(t, test.re, test.text, test.match);
}
}
func matchTest(t *T, expr string, str string, match []int) {
re := compileTest(t, expr, "");
if re == nil {
- return
+ return;
}
m := re.MatchString(str);
if m != (len(match) > 0) {
@@ -225,18 +226,18 @@ func matchTest(t *T, expr string, str string, match []int) {
func TestMatch(t *T) {
for i := 0; i < len(matches); i++ {
test := &matches[i];
- matchTest(t, test.re, test.text, test.match)
+ matchTest(t, test.re, test.text, test.match);
}
}
func matchStringsTest(t *T, expr string, str string, match []int) {
re := compileTest(t, expr, "");
if re == nil {
- return
+ return;
}
strs := make([]string, len(match)/2);
for i := 0; i < len(match); i++ {
- strs[i/2] = str[match[i] : match[i+1]]
+ strs[i/2] = str[match[i]:match[i+1]];
}
m := re.MatchStrings(str);
if !equalStrings(m, strs) {
@@ -258,14 +259,14 @@ func matchStringsTest(t *T, expr string, str string, match []int) {
func TestMatchStrings(t *T) {
for i := 0; i < len(matches); i++ {
test := &matches[i];
- matchTest(t, test.re, test.text, test.match)
+ matchTest(t, test.re, test.text, test.match);
}
}
func matchFunctionTest(t *T, expr string, str string, match []int) {
m, err := MatchString(expr, str);
if err == "" {
- return
+ return;
}
if m != (len(match) > 0) {
t.Error("function Match failure on `", expr, "` matching `", str, "`:", m, "should be", len(match) > 0);
@@ -275,6 +276,6 @@ func matchFunctionTest(t *T, expr string, str string, match []int) {
func TestMatchFunction(t *T) {
for i := 0; i < len(matches); i++ {
test := &matches[i];
- matchFunctionTest(t, test.re, test.text, test.match)
+ matchFunctionTest(t, test.re, test.text, test.match);
}
}
diff --git a/src/pkg/testing/testing.go b/src/pkg/testing/testing.go
index df542e20b..e8dfee2bd 100644
--- a/src/pkg/testing/testing.go
+++ b/src/pkg/testing/testing.go
@@ -67,13 +67,13 @@ func (t *T) FailNow() {
// Log formats its arguments using default formatting, analogous to Print(),
// and records the text in the error log.
func (t *T) Log(args ...) {
- t.errors += "\t" + tabify(fmt.Sprintln(args));
+ t.errors += "\t"+tabify(fmt.Sprintln(args));
}
// Logf formats its arguments according to the format, analogous to Printf(),
// and records the text in the error log.
func (t *T) Logf(format string, args ...) {
- t.errors += "\t" + tabify(fmt.Sprintf(format, args));
+ t.errors += "\t"+tabify(fmt.Sprintf(format, args));
}
// Error is equivalent to Log() followed by Fail().
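
Per the comments above, Log and Logf only append tab-indented text to the test's error log, and that text is reported when the test fails. A small hedged sketch of era-appropriate usage (hypothetical package and test name, pre-Go 1 syntax):

	package mypkg

	import (
		"testing";
	)

	func TestLogging(t *testing.T) {
		sum := 2+2;
		t.Logf("intermediate sum: %d", sum);	// recorded in the error log, shown on failure
		if sum != 4 {
			t.Error("2+2 =", sum, "want 4");	// Error is Log followed by Fail
		}
	}
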
diff --git a/src/pkg/time/time.go b/src/pkg/time/time.go
index 83ee535bd..3b626bbab 100644
--- a/src/pkg/time/time.go
+++ b/src/pkg/time/time.go
@@ -211,8 +211,8 @@ func (t *Time) Seconds() int64 {
sec := day * secondsPerDay;
// Add in time elapsed today.
- sec += int64(t.Hour) * 3600;
- sec += int64(t.Minute) * 60;
+ sec += int64(t.Hour)*3600;
+ sec += int64(t.Minute)*60;
sec += int64(t.Second);
// Convert from seconds since 2001 to seconds since 1970.
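
As a quick check of the arithmetic above, using a test vector that appears a few hunks below: 1221681866 seconds corresponds to 2008-09-17 20:04:26 UTC, and the time-of-day contribution is 20*3600 + 4*60 + 26 = 72266, which is exactly 1221681866 mod 86400 (secondsPerDay).
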
diff --git a/src/pkg/time/time_test.go b/src/pkg/time/time_test.go
index 2c80a716d..8a90f5d7a 100644
--- a/src/pkg/time/time_test.go
+++ b/src/pkg/time/time_test.go
@@ -5,9 +5,9 @@
package time_test
import (
- "os";
- "testing";
- . "time";
+ "os";
+ "testing";
+ . "time";
)
func init() {
@@ -18,35 +18,35 @@ func init() {
}
type TimeTest struct {
- seconds int64;
- golden Time;
+ seconds int64;
+ golden Time;
}
-var utctests = []TimeTest {
+var utctests = []TimeTest{
TimeTest{0, Time{1970, 1, 1, 0, 0, 0, Thursday, 0, "UTC"}},
TimeTest{1221681866, Time{2008, 9, 17, 20, 4, 26, Wednesday, 0, "UTC"}},
TimeTest{-1221681866, Time{1931, 4, 16, 3, 55, 34, Thursday, 0, "UTC"}},
TimeTest{1e18, Time{31688740476, 10, 23, 1, 46, 40, Friday, 0, "UTC"}},
TimeTest{-1e18, Time{-31688736537, 3, 10, 22, 13, 20, Tuesday, 0, "UTC"}},
TimeTest{0x7fffffffffffffff, Time{292277026596, 12, 4, 15, 30, 7, Sunday, 0, "UTC"}},
- TimeTest{-0x8000000000000000, Time{-292277022657, 1, 27, 8, 29, 52, Sunday, 0, "UTC"}}
+ TimeTest{-0x8000000000000000, Time{-292277022657, 1, 27, 8, 29, 52, Sunday, 0, "UTC"}},
}
-var localtests = []TimeTest {
- TimeTest{0, Time{1969, 12, 31, 16, 0, 0, Wednesday, -8*60*60, "PST"}},
- TimeTest{1221681866, Time{2008, 9, 17, 13, 4, 26, Wednesday, -7*60*60, "PDT"}}
+var localtests = []TimeTest{
+ TimeTest{0, Time{1969, 12, 31, 16, 0, 0, Wednesday, -8 * 60 * 60, "PST"}},
+ TimeTest{1221681866, Time{2008, 9, 17, 13, 4, 26, Wednesday, -7 * 60 * 60, "PDT"}},
}
func same(t, u *Time) bool {
- return t.Year == u.Year
- && t.Month == u.Month
- && t.Day == u.Day
- && t.Hour == u.Hour
- && t.Minute == u.Minute
- && t.Second == u.Second
- && t.Weekday == u.Weekday
- && t.ZoneOffset == u.ZoneOffset
- && t.Zone == u.Zone
+ return t.Year == u.Year &&
+ t.Month == u.Month &&
+ t.Day == u.Day &&
+ t.Hour == u.Hour &&
+ t.Minute == u.Minute &&
+ t.Second == u.Second &&
+ t.Weekday == u.Weekday &&
+ t.ZoneOffset == u.ZoneOffset &&
+ t.Zone == u.Zone;
}
func TestSecondsToUTC(t *testing.T) {
@@ -82,4 +82,3 @@ func TestSecondsToLocalTime(t *testing.T) {
}
}
}
-
diff --git a/src/pkg/time/zoneinfo.go b/src/pkg/time/zoneinfo.go
index 0d21cadce..b2b719cfd 100644
--- a/src/pkg/time/zoneinfo.go
+++ b/src/pkg/time/zoneinfo.go
@@ -34,7 +34,7 @@ func (d *data) read(n int) []byte {
return nil;
}
p := d.p[0:n];
- d.p = d.p[n : len(d.p)];
+ d.p = d.p[n:len(d.p)];
return p;
}
diff --git a/src/pkg/unicode/script_test.go b/src/pkg/unicode/script_test.go
index 64e205ccf..c212e3335 100644
--- a/src/pkg/unicode/script_test.go
+++ b/src/pkg/unicode/script_test.go
@@ -5,8 +5,8 @@
package unicode_test
import (
- "testing";
- . "unicode";
+ "testing";
+ . "unicode";
)
type T struct {
@@ -16,92 +16,91 @@ type T struct {
// Hand-chosen tests from Unicode 5.1.0, mostly to discover when new
// scripts and categories arise.
-var inTest = []T {
- T{0x06e2, "Arabic"},
- T{0x0567, "Armenian"},
- T{0x1b37, "Balinese"},
- T{0x09c2, "Bengali"},
- T{0x3115, "Bopomofo"},
- T{0x282d, "Braille"},
- T{0x1a1a, "Buginese"},
- T{0x1747, "Buhid"},
- T{0x156d, "Canadian_Aboriginal"},
- T{0x102a9, "Carian"},
- T{0xaa4d, "Cham"},
- T{0x13c2, "Cherokee"},
- T{0x0020, "Common"},
- T{0x1d4a5, "Common"},
- T{0x2cfc, "Coptic"},
- T{0x12420, "Cuneiform"},
- T{0x1080c, "Cypriot"},
- T{0xa663, "Cyrillic"},
- T{0x10430, "Deseret"},
- T{0x094a, "Devanagari"},
- T{0x1271, "Ethiopic"},
- T{0x10fc, "Georgian"},
- T{0x2c40, "Glagolitic"},
- T{0x10347, "Gothic"},
- T{0x03ae, "Greek"},
- T{0x0abf, "Gujarati"},
- T{0x0a24, "Gurmukhi"},
- T{0x3028, "Han"},
- T{0x11b8, "Hangul"},
- T{0x1727, "Hanunoo"},
- T{0x05a0, "Hebrew"},
- T{0x3058, "Hiragana"},
- T{0x20e6, "Inherited"},
- T{0x0cbd, "Kannada"},
- T{0x30a6, "Katakana"},
- T{0xa928, "Kayah_Li"},
- T{0x10a11, "Kharoshthi"},
- T{0x17c6, "Khmer"},
- T{0x0eaa, "Lao"},
- T{0x1d79, "Latin"},
- T{0x1c10, "Lepcha"},
- T{0x1930, "Limbu"},
- T{0x1003c, "Linear_B"},
- T{0x10290, "Lycian"},
- T{0x10930, "Lydian"},
- T{0x0d42, "Malayalam"},
- T{0x1822, "Mongolian"},
- T{0x104c, "Myanmar"},
- T{0x19c3, "New_Tai_Lue"},
- T{0x07f8, "Nko"},
- T{0x169b, "Ogham"},
- T{0x1c6a, "Ol_Chiki"},
- T{0x10310, "Old_Italic"},
- T{0x103c9, "Old_Persian"},
- T{0x0b3e, "Oriya"},
- T{0x10491, "Osmanya"},
- T{0xa860, "Phags_Pa"},
- T{0x10918, "Phoenician"},
- T{0xa949, "Rejang"},
- T{0x16c0, "Runic"},
- T{0xa892, "Saurashtra"},
- T{0x10463, "Shavian"},
- T{0x0dbd, "Sinhala"},
- T{0x1ba3, "Sundanese"},
- T{0xa803, "Syloti_Nagri"},
- T{0x070f, "Syriac"},
- T{0x170f, "Tagalog"},
- T{0x176f, "Tagbanwa"},
- T{0x1972, "Tai_Le"},
- T{0x0bbf, "Tamil"},
- T{0x0c55, "Telugu"},
- T{0x07a7, "Thaana"},
- T{0x0e46, "Thai"},
- T{0x0f36, "Tibetan"},
- T{0x2d55, "Tifinagh"},
- T{0x10388, "Ugaritic"},
- T{0xa60e, "Vai"},
- T{0xa216, "Yi"},
+var inTest = []T{
+ T{0x06e2, "Arabic"},
+ T{0x0567, "Armenian"},
+ T{0x1b37, "Balinese"},
+ T{0x09c2, "Bengali"},
+ T{0x3115, "Bopomofo"},
+ T{0x282d, "Braille"},
+ T{0x1a1a, "Buginese"},
+ T{0x1747, "Buhid"},
+ T{0x156d, "Canadian_Aboriginal"},
+ T{0x102a9, "Carian"},
+ T{0xaa4d, "Cham"},
+ T{0x13c2, "Cherokee"},
+ T{0x0020, "Common"},
+ T{0x1d4a5, "Common"},
+ T{0x2cfc, "Coptic"},
+ T{0x12420, "Cuneiform"},
+ T{0x1080c, "Cypriot"},
+ T{0xa663, "Cyrillic"},
+ T{0x10430, "Deseret"},
+ T{0x094a, "Devanagari"},
+ T{0x1271, "Ethiopic"},
+ T{0x10fc, "Georgian"},
+ T{0x2c40, "Glagolitic"},
+ T{0x10347, "Gothic"},
+ T{0x03ae, "Greek"},
+ T{0x0abf, "Gujarati"},
+ T{0x0a24, "Gurmukhi"},
+ T{0x3028, "Han"},
+ T{0x11b8, "Hangul"},
+ T{0x1727, "Hanunoo"},
+ T{0x05a0, "Hebrew"},
+ T{0x3058, "Hiragana"},
+ T{0x20e6, "Inherited"},
+ T{0x0cbd, "Kannada"},
+ T{0x30a6, "Katakana"},
+ T{0xa928, "Kayah_Li"},
+ T{0x10a11, "Kharoshthi"},
+ T{0x17c6, "Khmer"},
+ T{0x0eaa, "Lao"},
+ T{0x1d79, "Latin"},
+ T{0x1c10, "Lepcha"},
+ T{0x1930, "Limbu"},
+ T{0x1003c, "Linear_B"},
+ T{0x10290, "Lycian"},
+ T{0x10930, "Lydian"},
+ T{0x0d42, "Malayalam"},
+ T{0x1822, "Mongolian"},
+ T{0x104c, "Myanmar"},
+ T{0x19c3, "New_Tai_Lue"},
+ T{0x07f8, "Nko"},
+ T{0x169b, "Ogham"},
+ T{0x1c6a, "Ol_Chiki"},
+ T{0x10310, "Old_Italic"},
+ T{0x103c9, "Old_Persian"},
+ T{0x0b3e, "Oriya"},
+ T{0x10491, "Osmanya"},
+ T{0xa860, "Phags_Pa"},
+ T{0x10918, "Phoenician"},
+ T{0xa949, "Rejang"},
+ T{0x16c0, "Runic"},
+ T{0xa892, "Saurashtra"},
+ T{0x10463, "Shavian"},
+ T{0x0dbd, "Sinhala"},
+ T{0x1ba3, "Sundanese"},
+ T{0xa803, "Syloti_Nagri"},
+ T{0x070f, "Syriac"},
+ T{0x170f, "Tagalog"},
+ T{0x176f, "Tagbanwa"},
+ T{0x1972, "Tai_Le"},
+ T{0x0bbf, "Tamil"},
+ T{0x0c55, "Telugu"},
+ T{0x07a7, "Thaana"},
+ T{0x0e46, "Thai"},
+ T{0x0f36, "Tibetan"},
+ T{0x2d55, "Tifinagh"},
+ T{0x10388, "Ugaritic"},
+ T{0xa60e, "Vai"},
+ T{0xa216, "Yi"},
}
-var outTest = []T { // not really worth being thorough
- T{0x20, "Telugu"}
-}
+var outTest = []T{ // not really worth being thorough
+T{0x20, "Telugu"}}
-var inCategoryTest = []T {
+var inCategoryTest = []T{
T{0x0081, "Cc"},
T{0x17b4, "Cf"},
T{0xf0000, "Co"},
@@ -134,7 +133,7 @@ var inCategoryTest = []T {
T{0x04aa, "letter"},
}
-var inPropTest = []T {
+var inPropTest = []T{
T{0x0046, "ASCII_Hex_Digit"},
T{0x200F, "Bidi_Control"},
T{0x2212, "Dash"},
@@ -170,18 +169,18 @@ var inPropTest = []T {
}
func TestScripts(t *testing.T) {
- notTested := make(map[string] bool);
+ notTested := make(map[string]bool);
for k := range Scripts {
- notTested[k] = true
+ notTested[k] = true;
}
for _, test := range inTest {
if _, ok := Scripts[test.script]; !ok {
- t.Fatal(test.script, "not a known script")
+ t.Fatal(test.script, "not a known script");
}
if !Is(Scripts[test.script], test.rune) {
t.Errorf("IsScript(%#x, %s) = false, want true\n", test.rune, test.script);
}
- notTested[test.script] = false, false
+ notTested[test.script] = false, false;
}
for _, test := range outTest {
if Is(Scripts[test.script], test.rune) {
@@ -189,44 +188,44 @@ func TestScripts(t *testing.T) {
}
}
for k := range notTested {
- t.Error("not tested:", k)
+ t.Error("not tested:", k);
}
}
func TestCategories(t *testing.T) {
- notTested := make(map[string] bool);
+ notTested := make(map[string]bool);
for k := range Categories {
- notTested[k] = true
+ notTested[k] = true;
}
for _, test := range inCategoryTest {
if _, ok := Categories[test.script]; !ok {
- t.Fatal(test.script, "not a known category")
+ t.Fatal(test.script, "not a known category");
}
if !Is(Categories[test.script], test.rune) {
t.Errorf("IsCategory(%#x, %s) = false, want true\n", test.rune, test.script);
}
- notTested[test.script] = false, false
+ notTested[test.script] = false, false;
}
for k := range notTested {
- t.Error("not tested:", k)
+ t.Error("not tested:", k);
}
}
func TestProperties(t *testing.T) {
- notTested := make(map[string] bool);
+ notTested := make(map[string]bool);
for k := range Properties {
- notTested[k] = true
+ notTested[k] = true;
}
for _, test := range inPropTest {
if _, ok := Properties[test.script]; !ok {
- t.Fatal(test.script, "not a known prop")
+ t.Fatal(test.script, "not a known prop");
}
if !Is(Properties[test.script], test.rune) {
t.Errorf("IsCategory(%#x, %s) = false, want true\n", test.rune, test.script);
}
- notTested[test.script] = false, false
+ notTested[test.script] = false, false;
}
for k := range notTested {
- t.Error("not tested:", k)
+ t.Error("not tested:", k);
}
}
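
The three tests above exercise Scripts, Categories and Properties through the same Is predicate. A minimal sketch of that pattern outside the tests (pre-Go 1 syntax; assumes the unicode package of this era exports these names exactly as the tests use them):

	package main

	import (
		"fmt";
		"unicode";
	)

	func main() {
		// Is reports whether a rune falls in the given range table; the three
		// maps are keyed by script, category or property name.
		fmt.Println(unicode.Is(unicode.Scripts["Hiragana"], 0x3058));	// true: same rune as in inTest
		fmt.Println(unicode.Is(unicode.Categories["Cc"], 0x0081));	// true: from inCategoryTest
		fmt.Println(unicode.Is(unicode.Scripts["Telugu"], 0x20));	// false: space is Common, per outTest
	}
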
diff --git a/src/pkg/utf8/utf8.go b/src/pkg/utf8/utf8.go
index c8dc61304..18ae7cf58 100644
--- a/src/pkg/utf8/utf8.go
+++ b/src/pkg/utf8/utf8.go
@@ -10,28 +10,28 @@ import "unicode" // only needed for a couple of constants
// Numbers fundamental to the encoding.
const (
- RuneError = unicode.ReplacementChar; // the "error" Rune or "replacement character".
- RuneSelf = 0x80; // characters below Runeself are represented as themselves in a single byte.
- UTFMax = 4; // maximum number of bytes of a UTF-8 encoded Unicode character.
+ RuneError = unicode.ReplacementChar; // the "error" Rune or "replacement character".
+ RuneSelf = 0x80; // characters below RuneSelf are represented as themselves in a single byte.
+ UTFMax = 4; // maximum number of bytes of a UTF-8 encoded Unicode character.
)
const (
- _T1 = 0x00; // 0000 0000
- _Tx = 0x80; // 1000 0000
- _T2 = 0xC0; // 1100 0000
- _T3 = 0xE0; // 1110 0000
- _T4 = 0xF0; // 1111 0000
- _T5 = 0xF8; // 1111 1000
-
- _Maskx = 0x3F; // 0011 1111
- _Mask2 = 0x1F; // 0001 1111
- _Mask3 = 0x0F; // 0000 1111
- _Mask4 = 0x07; // 0000 0111
-
- _Rune1Max = 1<<7 - 1;
- _Rune2Max = 1<<11 - 1;
- _Rune3Max = 1<<16 - 1;
- _Rune4Max = 1<<21 - 1;
+ _T1 = 0x00; // 0000 0000
+ _Tx = 0x80; // 1000 0000
+ _T2 = 0xC0; // 1100 0000
+ _T3 = 0xE0; // 1110 0000
+ _T4 = 0xF0; // 1111 0000
+ _T5 = 0xF8; // 1111 1000
+
+ _Maskx = 0x3F; // 0011 1111
+ _Mask2 = 0x1F; // 0001 1111
+ _Mask3 = 0x0F; // 0000 1111
+ _Mask4 = 0x07; // 0000 0111
+
+ _Rune1Max = 1<<7 - 1;
+ _Rune2Max = 1<<11 - 1;
+ _Rune3Max = 1<<16 - 1;
+ _Rune4Max = 1<<21 - 1;
)
func decodeRuneInternal(p []byte) (rune, size int, short bool) {
@@ -43,70 +43,70 @@ func decodeRuneInternal(p []byte) (rune, size int, short bool) {
// 1-byte, 7-bit sequence?
if c0 < _Tx {
- return int(c0), 1, false
+ return int(c0), 1, false;
}
// unexpected continuation byte?
if c0 < _T2 {
- return RuneError, 1, false
+ return RuneError, 1, false;
}
// need first continuation byte
if n < 2 {
- return RuneError, 1, true
+ return RuneError, 1, true;
}
c1 := p[1];
if c1 < _Tx || _T2 <= c1 {
- return RuneError, 1, false
+ return RuneError, 1, false;
}
// 2-byte, 11-bit sequence?
if c0 < _T3 {
rune = int(c0&_Mask2)<<6 | int(c1&_Maskx);
if rune <= _Rune1Max {
- return RuneError, 1, false
+ return RuneError, 1, false;
}
- return rune, 2, false
+ return rune, 2, false;
}
// need second continuation byte
if n < 3 {
- return RuneError, 1, true
+ return RuneError, 1, true;
}
c2 := p[2];
if c2 < _Tx || _T2 <= c2 {
- return RuneError, 1, false
+ return RuneError, 1, false;
}
// 3-byte, 16-bit sequence?
if c0 < _T4 {
rune = int(c0&_Mask3)<<12 | int(c1&_Maskx)<<6 | int(c2&_Maskx);
if rune <= _Rune2Max {
- return RuneError, 1, false
+ return RuneError, 1, false;
}
- return rune, 3, false
+ return rune, 3, false;
}
// need third continuation byte
if n < 4 {
- return RuneError, 1, true
+ return RuneError, 1, true;
}
c3 := p[3];
if c3 < _Tx || _T2 <= c3 {
- return RuneError, 1, false
+ return RuneError, 1, false;
}
// 4-byte, 21-bit sequence?
if c0 < _T5 {
rune = int(c0&_Mask4)<<18 | int(c1&_Maskx)<<12 | int(c2&_Maskx)<<6 | int(c3&_Maskx);
if rune <= _Rune3Max {
- return RuneError, 1, false
+ return RuneError, 1, false;
}
- return rune, 4, false
+ return rune, 4, false;
}
// error
- return RuneError, 1, false
+ return RuneError, 1, false;
}
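
To make the mask arithmetic in decodeRuneInternal concrete: '日' (U+65E5, the first rune of the 日本語 test string used elsewhere in this diff) is encoded as the three bytes E6 97 A5. Decoding computes int(0xE6&_Mask3)<<12 | int(0x97&_Maskx)<<6 | int(0xA5&_Maskx) = 0x6000 | 0x5C0 | 0x25 = 0x65E5; that value is above _Rune2Max, so the overlong-encoding check passes and the function returns (0x65E5, 3, false).
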
func decodeRuneInStringInternal(s string) (rune, size int, short bool) {
@@ -118,83 +118,83 @@ func decodeRuneInStringInternal(s string) (rune, size int, short bool) {
// 1-byte, 7-bit sequence?
if c0 < _Tx {
- return int(c0), 1, false
+ return int(c0), 1, false;
}
// unexpected continuation byte?
if c0 < _T2 {
- return RuneError, 1, false
+ return RuneError, 1, false;
}
// need first continuation byte
if n < 2 {
- return RuneError, 1, true
+ return RuneError, 1, true;
}
c1 := s[1];
if c1 < _Tx || _T2 <= c1 {
- return RuneError, 1, false
+ return RuneError, 1, false;
}
// 2-byte, 11-bit sequence?
if c0 < _T3 {
rune = int(c0&_Mask2)<<6 | int(c1&_Maskx);
if rune <= _Rune1Max {
- return RuneError, 1, false
+ return RuneError, 1, false;
}
- return rune, 2, false
+ return rune, 2, false;
}
// need second continuation byte
if n < 3 {
- return RuneError, 1, true
+ return RuneError, 1, true;
}
c2 := s[2];
if c2 < _Tx || _T2 <= c2 {
- return RuneError, 1, false
+ return RuneError, 1, false;
}
// 3-byte, 16-bit sequence?
if c0 < _T4 {
rune = int(c0&_Mask3)<<12 | int(c1&_Maskx)<<6 | int(c2&_Maskx);
if rune <= _Rune2Max {
- return RuneError, 1, false
+ return RuneError, 1, false;
}
- return rune, 3, false
+ return rune, 3, false;
}
// need third continuation byte
if n < 4 {
- return RuneError, 1, true
+ return RuneError, 1, true;
}
c3 := s[3];
if c3 < _Tx || _T2 <= c3 {
- return RuneError, 1, false
+ return RuneError, 1, false;
}
// 4-byte, 21-bit sequence?
if c0 < _T5 {
rune = int(c0&_Mask4)<<18 | int(c1&_Maskx)<<12 | int(c2&_Maskx)<<6 | int(c3&_Maskx);
if rune <= _Rune3Max {
- return RuneError, 1, false
+ return RuneError, 1, false;
}
- return rune, 4, false
+ return rune, 4, false;
}
// error
- return RuneError, 1, false
+ return RuneError, 1, false;
}
// FullRune reports whether the bytes in p begin with a full UTF-8 encoding of a rune.
// An invalid encoding is considered a full Rune since it will convert as a width-1 error rune.
func FullRune(p []byte) bool {
_, _, short := decodeRuneInternal(p);
- return !short
+ return !short;
}
// FullRuneInString is like FullRune but its input is a string.
func FullRuneInString(s string) bool {
_, _, short := decodeRuneInStringInternal(s);
- return !short
+ return !short;
}
// DecodeRune unpacks the first UTF-8 encoding in p and returns the rune and its width in bytes.
@@ -233,23 +233,23 @@ func EncodeRune(rune int, p []byte) int {
}
if rune <= _Rune2Max {
- p[0] = _T2 | byte(rune>>6);
+ p[0] = _T2|byte(rune>>6);
p[1] = _Tx | byte(rune)&_Maskx;
return 2;
}
if rune > unicode.MaxRune {
- rune = RuneError
+ rune = RuneError;
}
if rune <= _Rune3Max {
- p[0] = _T3 | byte(rune>>12);
+ p[0] = _T3|byte(rune>>12);
p[1] = _Tx | byte(rune>>6)&_Maskx;
p[2] = _Tx | byte(rune)&_Maskx;
return 3;
}
- p[0] = _T4 | byte(rune>>18);
+ p[0] = _T4|byte(rune>>18);
p[1] = _Tx | byte(rune>>12)&_Maskx;
p[2] = _Tx | byte(rune>>6)&_Maskx;
p[3] = _Tx | byte(rune)&_Maskx;
@@ -292,5 +292,5 @@ func RuneCountInString(s string) int {
// an encoded rune. Second and subsequent bytes always have the top
// two bits set to 10.
func RuneStart(b byte) bool {
- return b & 0xC0 != 0x80
+ return b&0xC0 != 0x80;
}
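
Taken together, these utf8 hunks cover a small encode/decode surface: EncodeRune(rune int, p []byte) int writes the encoding and returns its width, DecodeRune and DecodeRuneInString return the rune and its width, FullRune reports whether a prefix holds a complete encoding, and RuneStart distinguishes first bytes from continuation bytes. A hedged round-trip sketch against that surface (pre-Go 1 syntax; return shapes inferred from the code and tests above):

	package main

	import (
		"fmt";
		"utf8";
	)

	func main() {
		p := make([]byte, utf8.UTFMax);		// UTFMax == 4
		n := utf8.EncodeRune(0x65E5, p);	// '日' -> e6 97 a5, so n == 3
		fmt.Printf("%x\n", p[0:n]);

		rune, size := utf8.DecodeRune(p[0:n]);	// round-trips to 0x65E5, size 3
		fmt.Printf("%#x %d\n", rune, size);

		fmt.Println(utf8.FullRune(p[0:2]));	// false: the sequence is truncated
		fmt.Println(utf8.RuneStart(p[1]));	// false: 0x97 is a continuation byte
	}
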
diff --git a/src/pkg/utf8/utf8_test.go b/src/pkg/utf8/utf8_test.go
index 9151ad9e0..f18eff8d6 100644
--- a/src/pkg/utf8/utf8_test.go
+++ b/src/pkg/utf8/utf8_test.go
@@ -120,7 +120,7 @@ func TestDecodeRune(t *testing.T) {
if rune != RuneError || size != wantsize {
t.Errorf("DecodeRune(%q) = 0x%04x, %d want 0x%04x, %d", b[0 : len(b)-1], rune, size, RuneError, wantsize);
}
- s = m.str[0 : len(m.str) - 1];
+ s = m.str[0 : len(m.str)-1];
rune, size = DecodeRuneInString(s);
if rune != RuneError || size != wantsize {
t.Errorf("DecodeRuneInString(%q) = 0x%04x, %d want 0x%04x, %d", s, rune, size, RuneError, wantsize);
diff --git a/src/pkg/xml/xml.go b/src/pkg/xml/xml.go
index 224d60743..5baaafbb3 100644
--- a/src/pkg/xml/xml.go
+++ b/src/pkg/xml/xml.go
@@ -27,6 +27,7 @@ import (
// A SyntaxError represents a syntax error in the XML input stream.
type SyntaxError string
+
func (e SyntaxError) String() string {
return "XML syntax error: " + string(e);
}
@@ -42,8 +43,8 @@ type Name struct {
// An Attr represents an attribute in an XML element (Name=Value).
type Attr struct {
- Name Name;
- Value string;
+ Name Name;
+ Value string;
}
// A Token is an interface holding one of the token types:
@@ -52,12 +53,12 @@ type Token interface{}
// A StartElement represents an XML start element.
type StartElement struct {
- Name Name;
- Attr []Attr;
+ Name Name;
+ Attr []Attr;
}
// An EndElement represents an XML end element.
-type EndElement struct {
+type EndElement struct {
Name Name;
}
@@ -86,8 +87,8 @@ func (c Comment) Copy() Comment {
// A ProcInst represents an XML processing instruction of the form <?target inst?>
type ProcInst struct {
- Target string;
- Inst []byte;
+ Target string;
+ Inst []byte;
}
func (p ProcInst) Copy() ProcInst {
@@ -104,23 +105,23 @@ func (d Directive) Copy() Directive {
}
type readByter interface {
- ReadByte() (b byte, err os.Error)
+ ReadByte() (b byte, err os.Error);
}
// A Parser represents an XML parser reading a particular input stream.
// The parser assumes that its input is encoded in UTF-8.
type Parser struct {
- r readByter;
- buf bytes.Buffer;
- stk *stack;
- free *stack;
- needClose bool;
- toClose Name;
- nextByte int;
- ns map[string]string;
- err os.Error;
- line int;
- tmp [32]byte;
+ r readByter;
+ buf bytes.Buffer;
+ stk *stack;
+ free *stack;
+ needClose bool;
+ toClose Name;
+ nextByte int;
+ ns map[string]string;
+ err os.Error;
+ line int;
+ tmp [32]byte;
}
// NewParser creates a new XML parser reading from r.
@@ -230,14 +231,14 @@ func (p *Parser) translate(n *Name, isElementName bool) {
// ending a given tag are *below* it on the stack, which is
// more work but forced on us by XML.
type stack struct {
- next *stack;
- kind int;
- name Name;
- ok bool;
+ next *stack;
+ kind int;
+ name Name;
+ ok bool;
}
const (
- stkStart = iota;
+ stkStart = iota;
stkNs;
)
@@ -388,7 +389,7 @@ func (p *Parser) RawToken() (Token, os.Error) {
b0 = b;
}
data := p.buf.Bytes();
- data = data[0:len(data)-2]; // chop ?>
+ data = data[0 : len(data)-2]; // chop ?>
return ProcInst{target, data}, nil;
case '!':
@@ -397,8 +398,8 @@ func (p *Parser) RawToken() (Token, os.Error) {
return nil, p.err;
}
switch b {
- case '-': // <!-
- // Probably <!-- for a comment.
+ case '-': // <!-
+ // Probably <!-- for a comment.
if b, ok = p.getc(); !ok {
return nil, p.err;
}
@@ -423,11 +424,11 @@ func (p *Parser) RawToken() (Token, os.Error) {
b0, b1 = b1, b;
}
data := p.buf.Bytes();
- data = data[0:len(data)-3]; // chop -->
+ data = data[0 : len(data)-3]; // chop -->
return Comment(data), nil;
- case '[': // <![
- // Probably <![CDATA[.
+ case '[': // <![
+ // Probably <![CDATA[.
for i := 0; i < 7; i++ {
if b, ok = p.getc(); !ok {
return nil, p.err;
@@ -465,9 +466,9 @@ func (p *Parser) RawToken() (Token, os.Error) {
p.ungetc(b);
var (
- name Name;
- empty bool;
- attr []Attr;
+ name Name;
+ empty bool;
+ attr []Attr;
)
if name, ok = p.nsname(); !ok {
if p.err == nil {
@@ -506,7 +507,7 @@ func (p *Parser) RawToken() (Token, os.Error) {
}
attr = nattr;
}
- attr = attr[0:n+1];
+ attr = attr[0 : n+1];
a := &attr[n];
if a.Name, ok = p.nsname(); !ok {
if p.err == nil {
@@ -591,7 +592,7 @@ func (p *Parser) ungetc(b byte) {
p.nextByte = int(b);
}
-var entity = map[string]int {
+var entity = map[string]int{
"lt": '<',
"gt": '>',
"amp": '&',
@@ -688,7 +689,7 @@ Input:
b0, b1 = b1, b;
}
data := p.buf.Bytes();
- data = data[0:len(data)-trunc];
+ data = data[0 : len(data)-trunc];
// Must rewrite \r and \r\n into \n.
w := 0;
@@ -718,7 +719,7 @@ func (p *Parser) nsname() (name Name, ok bool) {
name.Local = s;
} else {
name.Space = s[0:i];
- name.Local = s[i+1:len(s)];
+ name.Local = s[i+1 : len(s)];
}
return name, true;
}
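
The xml.go hunks above are largely gofmt-style reformatting of the Parser type and its RawToken machinery; the token types they touch (StartElement, EndElement, Comment, ProcInst, Directive) are what a RawToken loop switches over. A hedged sketch of such a loop (pre-Go 1 syntax; NewParser's argument and the exact error returned at end of input are assumptions, not shown in this diff):

	package main

	import (
		"bytes";
		"fmt";
		"xml";
	)

	func main() {
		p := xml.NewParser(bytes.NewBufferString("<a href='x'>hi<!-- note --></a>"));
		for {
			tok, err := p.RawToken();	// (Token, os.Error)
			if err != nil {
				break;	// assumed: end of input surfaces as an error here
			}
			switch t := tok.(type) {
			case xml.StartElement:
				fmt.Println("start:", t.Name.Local, "attrs:", len(t.Attr));
			case xml.EndElement:
				fmt.Println("end:", t.Name.Local);
			case xml.Comment:
				fmt.Println("comment:", string(t));
			case xml.ProcInst:
				fmt.Println("pi:", t.Target);
			}
		}
	}
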