summaryrefslogtreecommitdiff
path: root/src/pkg/http
diff options
context:
space:
mode:
Diffstat (limited to 'src/pkg/http')
-rw-r--r--src/pkg/http/Makefile2
-rw-r--r--src/pkg/http/client.go70
-rw-r--r--src/pkg/http/fs.go143
-rw-r--r--src/pkg/http/fs_test.go172
-rw-r--r--src/pkg/http/lex_test.go10
-rw-r--r--src/pkg/http/pprof/Makefile2
-rw-r--r--src/pkg/http/pprof/pprof.go26
-rw-r--r--src/pkg/http/readrequest_test.go20
-rw-r--r--src/pkg/http/request.go113
-rw-r--r--src/pkg/http/request_test.go60
-rw-r--r--src/pkg/http/requestwrite_test.go26
-rw-r--r--src/pkg/http/response.go8
-rw-r--r--src/pkg/http/response_test.go46
-rw-r--r--src/pkg/http/responsewrite_test.go19
-rw-r--r--src/pkg/http/serve_test.go135
-rw-r--r--src/pkg/http/server.go542
-rw-r--r--src/pkg/http/status.go6
-rw-r--r--src/pkg/http/testdata/file1
-rw-r--r--src/pkg/http/transfer.go23
-rw-r--r--src/pkg/http/triv.go77
-rw-r--r--src/pkg/http/url.go287
-rw-r--r--src/pkg/http/url_test.go377
22 files changed, 1554 insertions, 611 deletions
diff --git a/src/pkg/http/Makefile b/src/pkg/http/Makefile
index 235ff0279..7e4f80c28 100644
--- a/src/pkg/http/Makefile
+++ b/src/pkg/http/Makefile
@@ -2,7 +2,7 @@
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
-include ../../Make.$(GOARCH)
+include ../../Make.inc
TARG=http
GOFILES=\
diff --git a/src/pkg/http/client.go b/src/pkg/http/client.go
index 54487dac2..29678ee32 100644
--- a/src/pkg/http/client.go
+++ b/src/pkg/http/client.go
@@ -8,11 +8,14 @@ package http
import (
"bufio"
+ "bytes"
+ "crypto/tls"
"encoding/base64"
"fmt"
"io"
"net"
"os"
+ "strconv"
"strings"
)
@@ -21,7 +24,7 @@ import (
func hasPort(s string) bool { return strings.LastIndex(s, ":") > strings.LastIndex(s, "]") }
// Used in Send to implement io.ReadCloser by bundling together the
-// io.BufReader through which we read the response, and the underlying
+// bufio.Reader through which we read the response, and the underlying
// network connection.
type readClose struct {
io.Reader
@@ -34,15 +37,15 @@ type readClose struct {
// send() method is nonpublic because, when we refactor the code for persistent
// connections, it may no longer make sense to have a method with this signature.
func send(req *Request) (resp *Response, err os.Error) {
- if req.URL.Scheme != "http" {
+ if req.URL.Scheme != "http" && req.URL.Scheme != "https" {
return nil, &badStringError{"unsupported protocol scheme", req.URL.Scheme}
}
addr := req.URL.Host
if !hasPort(addr) {
- addr += ":http"
+ addr += ":" + req.URL.Scheme
}
- info := req.URL.Userinfo
+ info := req.URL.RawUserinfo
if len(info) > 0 {
enc := base64.URLEncoding
encoded := make([]byte, enc.EncodedLen(len(info)))
@@ -52,9 +55,25 @@ func send(req *Request) (resp *Response, err os.Error) {
}
req.Header["Authorization"] = "Basic " + string(encoded)
}
- conn, err := net.Dial("tcp", "", addr)
- if err != nil {
- return nil, err
+
+ var conn io.ReadWriteCloser
+ if req.URL.Scheme == "http" {
+ conn, err = net.Dial("tcp", "", addr)
+ if err != nil {
+ return nil, err
+ }
+ } else { // https
+ conn, err = tls.Dial("tcp", "", addr, nil)
+ if err != nil {
+ return nil, err
+ }
+ h := req.URL.Host
+ if hasPort(h) {
+ h = h[0:strings.LastIndex(h, ":")]
+ }
+ if err := conn.(*tls.Conn).VerifyHostname(h); err != nil {
+ return nil, err
+ }
}
err = req.Write(conn)
@@ -111,6 +130,7 @@ func Get(url string) (r *Response, finalURL string, err os.Error) {
if req.URL, err = ParseURL(url); err != nil {
break
}
+ url = req.URL.String()
if r, err = send(&req); err != nil {
break
}
@@ -153,6 +173,41 @@ func Post(url string, bodyType string, body io.Reader) (r *Response, err os.Erro
return send(&req)
}
+// PostForm issues a POST to the specified URL,
+// with data's keys and values urlencoded as the request body.
+//
+// Caller should close r.Body when done reading it.
+func PostForm(url string, data map[string]string) (r *Response, err os.Error) {
+ var req Request
+ req.Method = "POST"
+ req.ProtoMajor = 1
+ req.ProtoMinor = 1
+ req.Close = true
+ body := urlencode(data)
+ req.Body = nopCloser{body}
+ req.Header = map[string]string{
+ "Content-Type": "application/x-www-form-urlencoded",
+ "Content-Length": strconv.Itoa(body.Len()),
+ }
+ req.ContentLength = int64(body.Len())
+
+ req.URL, err = ParseURL(url)
+ if err != nil {
+ return nil, err
+ }
+
+ return send(&req)
+}
+
+// TODO: remove this function when PostForm takes a multimap.
+func urlencode(data map[string]string) (b *bytes.Buffer) {
+ m := make(map[string][]string, len(data))
+ for k, v := range data {
+ m[k] = []string{v}
+ }
+ return bytes.NewBuffer([]byte(EncodeQuery(m)))
+}
+
// Head issues a HEAD to the specified URL.
func Head(url string) (r *Response, err os.Error) {
var req Request
@@ -160,6 +215,7 @@ func Head(url string) (r *Response, err os.Error) {
if req.URL, err = ParseURL(url); err != nil {
return
}
+ url = req.URL.String()
if r, err = send(&req); err != nil {
return
}
diff --git a/src/pkg/http/fs.go b/src/pkg/http/fs.go
index 40bb3d138..143a839a8 100644
--- a/src/pkg/http/fs.go
+++ b/src/pkg/http/fs.go
@@ -12,7 +12,9 @@ import (
"mime"
"os"
"path"
+ "strconv"
"strings"
+ "time"
"utf8"
)
@@ -25,7 +27,7 @@ func isText(b []byte) bool {
// decoding error
return false
}
- if 0x80 <= rune && rune <= 0x9F {
+ if 0x7F <= rune && rune <= 0x9F {
return false
}
if rune < ' ' {
@@ -42,8 +44,8 @@ func isText(b []byte) bool {
return true
}
-func dirList(c *Conn, f *os.File) {
- fmt.Fprintf(c, "<pre>\n")
+func dirList(w ResponseWriter, f *os.File) {
+ fmt.Fprintf(w, "<pre>\n")
for {
dirs, err := f.Readdir(100)
if err != nil || len(dirs) == 0 {
@@ -55,26 +57,25 @@ func dirList(c *Conn, f *os.File) {
name += "/"
}
// TODO htmlescape
- fmt.Fprintf(c, "<a href=\"%s\">%s</a>\n", name, name)
+ fmt.Fprintf(w, "<a href=\"%s\">%s</a>\n", name, name)
}
}
- fmt.Fprintf(c, "</pre>\n")
+ fmt.Fprintf(w, "</pre>\n")
}
-
-func serveFileInternal(c *Conn, r *Request, name string, redirect bool) {
+func serveFile(w ResponseWriter, r *Request, name string, redirect bool) {
const indexPage = "/index.html"
// redirect .../index.html to .../
if strings.HasSuffix(r.URL.Path, indexPage) {
- Redirect(c, r.URL.Path[0:len(r.URL.Path)-len(indexPage)+1], StatusMovedPermanently)
+ Redirect(w, r, r.URL.Path[0:len(r.URL.Path)-len(indexPage)+1], StatusMovedPermanently)
return
}
f, err := os.Open(name, os.O_RDONLY, 0)
if err != nil {
// TODO expose actual error?
- NotFound(c, r)
+ NotFound(w, r)
return
}
defer f.Close()
@@ -82,7 +83,7 @@ func serveFileInternal(c *Conn, r *Request, name string, redirect bool) {
d, err1 := f.Stat()
if err1 != nil {
// TODO expose actual error?
- NotFound(c, r)
+ NotFound(w, r)
return
}
@@ -92,17 +93,23 @@ func serveFileInternal(c *Conn, r *Request, name string, redirect bool) {
url := r.URL.Path
if d.IsDirectory() {
if url[len(url)-1] != '/' {
- Redirect(c, url+"/", StatusMovedPermanently)
+ Redirect(w, r, url+"/", StatusMovedPermanently)
return
}
} else {
if url[len(url)-1] == '/' {
- Redirect(c, url[0:len(url)-1], StatusMovedPermanently)
+ Redirect(w, r, url[0:len(url)-1], StatusMovedPermanently)
return
}
}
}
+ if t, _ := time.Parse(TimeFormat, r.Header["If-Modified-Since"]); t != nil && d.Mtime_ns/1e9 <= t.Seconds() {
+ w.WriteHeader(StatusNotModified)
+ return
+ }
+ w.SetHeader("Last-Modified", time.SecondsToUTC(d.Mtime_ns/1e9).Format(TimeFormat))
+
// use contents of index.html for directory, if present
if d.IsDirectory() {
index := name + indexPage
@@ -119,33 +126,60 @@ func serveFileInternal(c *Conn, r *Request, name string, redirect bool) {
}
if d.IsDirectory() {
- dirList(c, f)
+ dirList(w, f)
return
}
// serve file
+ size := d.Size
+ code := StatusOK
+
// use extension to find content type.
ext := path.Ext(name)
if ctype := mime.TypeByExtension(ext); ctype != "" {
- c.SetHeader("Content-Type", ctype)
+ w.SetHeader("Content-Type", ctype)
} else {
// read first chunk to decide between utf-8 text and binary
var buf [1024]byte
- n, _ := io.ReadFull(f, buf[0:])
- b := buf[0:n]
+ n, _ := io.ReadFull(f, buf[:])
+ b := buf[:n]
if isText(b) {
- c.SetHeader("Content-Type", "text-plain; charset=utf-8")
+ w.SetHeader("Content-Type", "text-plain; charset=utf-8")
} else {
- c.SetHeader("Content-Type", "application/octet-stream") // generic binary
+ w.SetHeader("Content-Type", "application/octet-stream") // generic binary
+ }
+ f.Seek(0, 0) // rewind to output whole file
+ }
+
+ // handle Content-Range header.
+ // TODO(adg): handle multiple ranges
+ ranges, err := parseRange(r.Header["Range"], size)
+ if err != nil || len(ranges) > 1 {
+ Error(w, err.String(), StatusRequestedRangeNotSatisfiable)
+ return
+ }
+ if len(ranges) == 1 {
+ ra := ranges[0]
+ if _, err := f.Seek(ra.start, 0); err != nil {
+ Error(w, err.String(), StatusRequestedRangeNotSatisfiable)
+ return
}
- c.Write(b)
+ size = ra.length
+ code = StatusPartialContent
+ w.SetHeader("Content-Range", fmt.Sprintf("%d-%d/%d", ra.start, ra.start+ra.length, d.Size))
}
- io.Copy(c, f)
+
+ w.SetHeader("Accept-Ranges", "bytes")
+ w.SetHeader("Content-Length", strconv.Itoa64(size))
+
+ w.WriteHeader(code)
+
+ io.Copyn(w, f, size)
}
// ServeFile replies to the request with the contents of the named file or directory.
-func ServeFile(c *Conn, r *Request, name string) {
- serveFileInternal(c, r, name, false)
+func ServeFile(w ResponseWriter, r *Request, name string) {
+ serveFile(w, r, name, false)
}
type fileHandler struct {
@@ -159,12 +193,71 @@ type fileHandler struct {
// looking up the file name in the file system.
func FileServer(root, prefix string) Handler { return &fileHandler{root, prefix} }
-func (f *fileHandler) ServeHTTP(c *Conn, r *Request) {
+func (f *fileHandler) ServeHTTP(w ResponseWriter, r *Request) {
path := r.URL.Path
if !strings.HasPrefix(path, f.prefix) {
- NotFound(c, r)
+ NotFound(w, r)
return
}
path = path[len(f.prefix):]
- serveFileInternal(c, r, f.root+"/"+path, true)
+ serveFile(w, r, f.root+"/"+path, true)
+}
+
+// httpRange specifies the byte range to be sent to the client.
+type httpRange struct {
+ start, length int64
+}
+
+// parseRange parses a Range header string as per RFC 2616.
+func parseRange(s string, size int64) ([]httpRange, os.Error) {
+ if s == "" {
+ return nil, nil // header not present
+ }
+ const b = "bytes="
+ if !strings.HasPrefix(s, b) {
+ return nil, os.NewError("invalid range")
+ }
+ var ranges []httpRange
+ for _, ra := range strings.Split(s[len(b):], ",", -1) {
+ i := strings.Index(ra, "-")
+ if i < 0 {
+ return nil, os.NewError("invalid range")
+ }
+ start, end := ra[:i], ra[i+1:]
+ var r httpRange
+ if start == "" {
+ // If no start is specified, end specifies the
+ // range start relative to the end of the file.
+ i, err := strconv.Atoi64(end)
+ if err != nil {
+ return nil, os.NewError("invalid range")
+ }
+ if i > size {
+ i = size
+ }
+ r.start = size - i
+ r.length = size - r.start
+ } else {
+ i, err := strconv.Atoi64(start)
+ if err != nil || i > size || i < 0 {
+ return nil, os.NewError("invalid range")
+ }
+ r.start = i
+ if end == "" {
+ // If no end is specified, range extends to end of the file.
+ r.length = size - r.start
+ } else {
+ i, err := strconv.Atoi64(end)
+ if err != nil || r.start > i {
+ return nil, os.NewError("invalid range")
+ }
+ if i >= size {
+ i = size - 1
+ }
+ r.length = i - r.start + 1
+ }
+ }
+ ranges = append(ranges, r)
+ }
+ return ranges, nil
}
diff --git a/src/pkg/http/fs_test.go b/src/pkg/http/fs_test.go
new file mode 100644
index 000000000..0f7135692
--- /dev/null
+++ b/src/pkg/http/fs_test.go
@@ -0,0 +1,172 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package http
+
+import (
+ "fmt"
+ "io/ioutil"
+ "net"
+ "os"
+ "sync"
+ "testing"
+)
+
+var ParseRangeTests = []struct {
+ s string
+ length int64
+ r []httpRange
+}{
+ {"", 0, nil},
+ {"foo", 0, nil},
+ {"bytes=", 0, nil},
+ {"bytes=5-4", 10, nil},
+ {"bytes=0-2,5-4", 10, nil},
+ {"bytes=0-9", 10, []httpRange{{0, 10}}},
+ {"bytes=0-", 10, []httpRange{{0, 10}}},
+ {"bytes=5-", 10, []httpRange{{5, 5}}},
+ {"bytes=0-20", 10, []httpRange{{0, 10}}},
+ {"bytes=15-,0-5", 10, nil},
+ {"bytes=-5", 10, []httpRange{{5, 5}}},
+ {"bytes=-15", 10, []httpRange{{0, 10}}},
+ {"bytes=0-499", 10000, []httpRange{{0, 500}}},
+ {"bytes=500-999", 10000, []httpRange{{500, 500}}},
+ {"bytes=-500", 10000, []httpRange{{9500, 500}}},
+ {"bytes=9500-", 10000, []httpRange{{9500, 500}}},
+ {"bytes=0-0,-1", 10000, []httpRange{{0, 1}, {9999, 1}}},
+ {"bytes=500-600,601-999", 10000, []httpRange{{500, 101}, {601, 399}}},
+ {"bytes=500-700,601-999", 10000, []httpRange{{500, 201}, {601, 399}}},
+}
+
+func TestParseRange(t *testing.T) {
+ for _, test := range ParseRangeTests {
+ r := test.r
+ ranges, err := parseRange(test.s, test.length)
+ if err != nil && r != nil {
+ t.Errorf("parseRange(%q) returned error %q", test.s, err)
+ }
+ if len(ranges) != len(r) {
+ t.Errorf("len(parseRange(%q)) = %d, want %d", test.s, len(ranges), len(r))
+ continue
+ }
+ for i := range r {
+ if ranges[i].start != r[i].start {
+ t.Errorf("parseRange(%q)[%d].start = %d, want %d", test.s, i, ranges[i].start, r[i].start)
+ }
+ if ranges[i].length != r[i].length {
+ t.Errorf("parseRange(%q)[%d].length = %d, want %d", test.s, i, ranges[i].length, r[i].length)
+ }
+ }
+ }
+}
+
+const (
+ testFile = "testdata/file"
+ testFileLength = 11
+)
+
+var (
+ serverOnce sync.Once
+ serverAddr string
+)
+
+func startServer(t *testing.T) {
+ serverOnce.Do(func() {
+ HandleFunc("/ServeFile", func(w ResponseWriter, r *Request) {
+ ServeFile(w, r, "testdata/file")
+ })
+ l, err := net.Listen("tcp", "127.0.0.1:0")
+ if err != nil {
+ t.Fatal("listen:", err)
+ }
+ serverAddr = l.Addr().String()
+ go Serve(l, nil)
+ })
+}
+
+var ServeFileRangeTests = []struct {
+ start, end int
+ r string
+ code int
+}{
+ {0, testFileLength, "", StatusOK},
+ {0, 5, "0-4", StatusPartialContent},
+ {2, testFileLength, "2-", StatusPartialContent},
+ {testFileLength - 5, testFileLength, "-5", StatusPartialContent},
+ {3, 8, "3-7", StatusPartialContent},
+ {0, 0, "20-", StatusRequestedRangeNotSatisfiable},
+}
+
+func TestServeFile(t *testing.T) {
+ startServer(t)
+ var err os.Error
+
+ file, err := ioutil.ReadFile(testFile)
+ if err != nil {
+ t.Fatal("reading file:", err)
+ }
+
+ // set up the Request (re-used for all tests)
+ var req Request
+ req.Header = make(map[string]string)
+ if req.URL, err = ParseURL("http://" + serverAddr + "/ServeFile"); err != nil {
+ t.Fatal("ParseURL:", err)
+ }
+ req.Method = "GET"
+
+ // straight GET
+ _, body := getBody(t, req)
+ if !equal(body, file) {
+ t.Fatalf("body mismatch: got %q, want %q", body, file)
+ }
+
+ // Range tests
+ for _, rt := range ServeFileRangeTests {
+ req.Header["Range"] = "bytes=" + rt.r
+ if rt.r == "" {
+ req.Header["Range"] = ""
+ }
+ r, body := getBody(t, req)
+ if r.StatusCode != rt.code {
+ t.Errorf("range=%q: StatusCode=%d, want %d", rt.r, r.StatusCode, rt.code)
+ }
+ if rt.code == StatusRequestedRangeNotSatisfiable {
+ continue
+ }
+ h := fmt.Sprintf("%d-%d/%d", rt.start, rt.end, testFileLength)
+ if rt.r == "" {
+ h = ""
+ }
+ if r.Header["Content-Range"] != h {
+ t.Errorf("header mismatch: range=%q: got %q, want %q", rt.r, r.Header["Content-Range"], h)
+ }
+ if !equal(body, file[rt.start:rt.end]) {
+ t.Errorf("body mismatch: range=%q: got %q, want %q", rt.r, body, file[rt.start:rt.end])
+ }
+ }
+}
+
+func getBody(t *testing.T, req Request) (*Response, []byte) {
+ r, err := send(&req)
+ if err != nil {
+ t.Fatal(req.URL.String(), "send:", err)
+ }
+ b, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ t.Fatal("reading Body:", err)
+ }
+ return r, b
+}
+
+func equal(a, b []byte) bool {
+ if len(a) != len(b) {
+ return false
+ }
+ for i := range a {
+ if a[i] != b[i] {
+ return false
+ }
+ }
+ return true
+}
diff --git a/src/pkg/http/lex_test.go b/src/pkg/http/lex_test.go
index 043430cb7..5386f7534 100644
--- a/src/pkg/http/lex_test.go
+++ b/src/pkg/http/lex_test.go
@@ -15,29 +15,29 @@ type lexTest struct {
}
var lexTests = []lexTest{
- lexTest{
+ {
Raw: `"abc"def,:ghi`,
Parsed: 13,
Result: []string{"abcdef", "ghi"},
},
// My understanding of the RFC is that escape sequences outside of
// quotes are not interpreted?
- lexTest{
+ {
Raw: `"\t"\t"\t"`,
Parsed: 10,
Result: []string{"\t", "t\t"},
},
- lexTest{
+ {
Raw: `"\yab"\r\n`,
Parsed: 10,
Result: []string{"?ab", "r", "n"},
},
- lexTest{
+ {
Raw: "ab\f",
Parsed: 3,
Result: []string{"ab?"},
},
- lexTest{
+ {
Raw: "\"ab \" c,de f, gh, ij\n\t\r",
Parsed: 23,
Result: []string{"ab ", "c", "de", "f", "gh", "ij"},
diff --git a/src/pkg/http/pprof/Makefile b/src/pkg/http/pprof/Makefile
index e0315112f..5858a0efa 100644
--- a/src/pkg/http/pprof/Makefile
+++ b/src/pkg/http/pprof/Makefile
@@ -2,7 +2,7 @@
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
-include ../../../Make.$(GOARCH)
+include ../../../Make.inc
TARG=http/pprof
GOFILES=\
diff --git a/src/pkg/http/pprof/pprof.go b/src/pkg/http/pprof/pprof.go
index 38d91afbf..f7db9aab9 100644
--- a/src/pkg/http/pprof/pprof.go
+++ b/src/pkg/http/pprof/pprof.go
@@ -40,28 +40,28 @@ func init() {
// Cmdline responds with the running program's
// command line, with arguments separated by NUL bytes.
// The package initialization registers it as /debug/pprof/cmdline.
-func Cmdline(c *http.Conn, r *http.Request) {
- c.SetHeader("content-type", "text/plain; charset=utf-8")
- fmt.Fprintf(c, strings.Join(os.Args, "\x00"))
+func Cmdline(w http.ResponseWriter, r *http.Request) {
+ w.SetHeader("content-type", "text/plain; charset=utf-8")
+ fmt.Fprintf(w, strings.Join(os.Args, "\x00"))
}
// Heap responds with the pprof-formatted heap profile.
// The package initialization registers it as /debug/pprof/heap.
-func Heap(c *http.Conn, r *http.Request) {
- c.SetHeader("content-type", "text/plain; charset=utf-8")
- pprof.WriteHeapProfile(c)
+func Heap(w http.ResponseWriter, r *http.Request) {
+ w.SetHeader("content-type", "text/plain; charset=utf-8")
+ pprof.WriteHeapProfile(w)
}
// Symbol looks up the program counters listed in the request,
// responding with a table mapping program counters to function names.
// The package initialization registers it as /debug/pprof/symbol.
-func Symbol(c *http.Conn, r *http.Request) {
- c.SetHeader("content-type", "text/plain; charset=utf-8")
+func Symbol(w http.ResponseWriter, r *http.Request) {
+ w.SetHeader("content-type", "text/plain; charset=utf-8")
// We don't know how many symbols we have, but we
// do have symbol information. Pprof only cares whether
// this number is 0 (no symbols available) or > 0.
- fmt.Fprintf(c, "num_symbols: 1\n")
+ fmt.Fprintf(w, "num_symbols: 1\n")
var b *bufio.Reader
if r.Method == "POST" {
@@ -71,15 +71,15 @@ func Symbol(c *http.Conn, r *http.Request) {
}
for {
- w, err := b.ReadSlice('+')
+ word, err := b.ReadSlice('+')
if err == nil {
- w = w[0 : len(w)-1] // trim +
+ word = word[0 : len(word)-1] // trim +
}
- pc, _ := strconv.Btoui64(string(w), 0)
+ pc, _ := strconv.Btoui64(string(word), 0)
if pc != 0 {
f := runtime.FuncForPC(uintptr(pc))
if f != nil {
- fmt.Fprintf(c, "%#x %s\n", pc, f.Name())
+ fmt.Fprintf(w, "%#x %s\n", pc, f.Name())
}
}
diff --git a/src/pkg/http/readrequest_test.go b/src/pkg/http/readrequest_test.go
index 7654dbfc7..067e17dda 100644
--- a/src/pkg/http/readrequest_test.go
+++ b/src/pkg/http/readrequest_test.go
@@ -20,7 +20,7 @@ type reqTest struct {
var reqTests = []reqTest{
// Baseline test; All Request fields included for template use
- reqTest{
+ {
"GET http://www.techcrunch.com/ HTTP/1.1\r\n" +
"Host: www.techcrunch.com\r\n" +
"User-Agent: Fake\r\n" +
@@ -37,15 +37,15 @@ var reqTests = []reqTest{
Method: "GET",
RawURL: "http://www.techcrunch.com/",
URL: &URL{
- Raw: "http://www.techcrunch.com/",
- Scheme: "http",
- RawPath: "/",
- Authority: "www.techcrunch.com",
- Userinfo: "",
- Host: "www.techcrunch.com",
- Path: "/",
- RawQuery: "",
- Fragment: "",
+ Raw: "http://www.techcrunch.com/",
+ Scheme: "http",
+ RawPath: "/",
+ RawAuthority: "www.techcrunch.com",
+ RawUserinfo: "",
+ Host: "www.techcrunch.com",
+ Path: "/",
+ RawQuery: "",
+ Fragment: "",
},
Proto: "HTTP/1.1",
ProtoMajor: 1,
diff --git a/src/pkg/http/request.go b/src/pkg/http/request.go
index 8a72d6cfa..b88689988 100644
--- a/src/pkg/http/request.go
+++ b/src/pkg/http/request.go
@@ -16,6 +16,8 @@ import (
"fmt"
"io"
"io/ioutil"
+ "mime"
+ "mime/multipart"
"os"
"strconv"
"strings"
@@ -40,6 +42,8 @@ var (
ErrNotSupported = &ProtocolError{"feature not supported"}
ErrUnexpectedTrailer = &ProtocolError{"trailer header without chunked transfer encoding"}
ErrMissingContentLength = &ProtocolError{"missing ContentLength in HEAD response"}
+ ErrNotMultipart = &ProtocolError{"request Content-Type isn't multipart/form-data"}
+ ErrMissingBoundary = &ProtocolError{"no multipart boundary param Content-Type"}
)
type badStringError struct {
@@ -67,7 +71,7 @@ type Request struct {
ProtoMajor int // 1
ProtoMinor int // 0
- // A header mapping request lines to their values.
+ // A header maps request lines to their values.
// If the header says
//
// accept-encoding: gzip, deflate
@@ -139,6 +143,24 @@ func (r *Request) ProtoAtLeast(major, minor int) bool {
r.ProtoMajor == major && r.ProtoMinor >= minor
}
+// MultipartReader returns a MIME multipart reader if this is a
+// multipart/form-data POST request, else returns nil and an error.
+func (r *Request) MultipartReader() (multipart.Reader, os.Error) {
+ v, ok := r.Header["Content-Type"]
+ if !ok {
+ return nil, ErrNotMultipart
+ }
+ d, params := mime.ParseMediaType(v)
+ if d != "multipart/form-data" {
+ return nil, ErrNotMultipart
+ }
+ boundary, ok := params["boundary"]
+ if !ok {
+ return nil, ErrMissingBoundary
+ }
+ return multipart.NewReader(r.Body, boundary), nil
+}
+
// Return value if nonempty, def otherwise.
func valueOrDefault(value, def string) string {
if value != "" {
@@ -169,7 +191,7 @@ func (req *Request) Write(w io.Writer) os.Error {
uri := req.RawURL
if uri == "" {
- uri = valueOrDefault(urlEscape(req.URL.Path, false), "/")
+ uri = valueOrDefault(urlEscape(req.URL.Path, encodePath), "/")
if req.URL.RawQuery != "" {
uri += "?" + req.URL.RawQuery
}
@@ -227,6 +249,8 @@ func readLineBytes(b *bufio.Reader) (p []byte, err os.Error) {
// If the caller asked for a line, there should be a line.
if err == os.EOF {
err = io.ErrUnexpectedEOF
+ } else if err == bufio.ErrBufferFull {
+ err = ErrLineTooLong
}
return nil, err
}
@@ -275,7 +299,7 @@ func readKeyValue(b *bufio.Reader) (key, value string, err os.Error) {
}
key = string(line[0:i])
- if strings.Index(key, " ") >= 0 {
+ if strings.Contains(key, " ") {
// Key field has space - no good.
goto Malformed
}
@@ -360,29 +384,30 @@ func parseHTTPVersion(vers string) (int, int, bool) {
return major, minor, true
}
-var cmap = make(map[string]string)
-
// CanonicalHeaderKey returns the canonical format of the
// HTTP header key s. The canonicalization converts the first
// letter and any letter following a hyphen to upper case;
// the rest are converted to lowercase. For example, the
// canonical key for "accept-encoding" is "Accept-Encoding".
func CanonicalHeaderKey(s string) string {
- if t, ok := cmap[s]; ok {
- return t
- }
-
// canonicalize: first letter upper case
// and upper case after each dash.
// (Host, User-Agent, If-Modified-Since).
// HTTP headers are ASCII only, so no Unicode issues.
- a := []byte(s)
+ var a []byte
upper := true
- for i, v := range a {
+ for i := 0; i < len(s); i++ {
+ v := s[i]
if upper && 'a' <= v && v <= 'z' {
+ if a == nil {
+ a = []byte(s)
+ }
a[i] = v + 'A' - 'a'
}
if !upper && 'A' <= v && v <= 'Z' {
+ if a == nil {
+ a = []byte(s)
+ }
a[i] = v + 'a' - 'A'
}
upper = false
@@ -390,9 +415,10 @@ func CanonicalHeaderKey(s string) string {
upper = true
}
}
- t := string(a)
- cmap[s] = t
- return t
+ if a != nil {
+ return string(a)
+ }
+ return s
}
type chunkedReader struct {
@@ -566,9 +592,22 @@ func ReadRequest(b *bufio.Reader) (req *Request, err os.Error) {
return req, nil
}
+// ParseQuery parses the URL-encoded query string and returns
+// a map listing the values specified for each key.
+// ParseQuery always returns a non-nil map containing all the
+// valid query parameters found; err describes the first decoding error
+// encountered, if any.
func ParseQuery(query string) (m map[string][]string, err os.Error) {
m = make(map[string][]string)
+ err = parseQuery(m, query)
+ return
+}
+
+func parseQuery(m map[string][]string, query string) (err os.Error) {
for _, kv := range strings.Split(query, "&", -1) {
+ if len(kv) == 0 {
+ continue
+ }
kvPair := strings.Split(kv, "=", 2)
var key, value string
@@ -579,14 +618,13 @@ func ParseQuery(query string) (m map[string][]string, err os.Error) {
}
if e != nil {
err = e
+ continue
}
-
vec := vector.StringVector(m[key])
vec.Push(value)
m[key] = vec
}
-
- return
+ return err
}
// ParseForm parses the request body as a form for POST requests, or the raw query for GET requests.
@@ -596,32 +634,34 @@ func (r *Request) ParseForm() (err os.Error) {
return
}
- var query string
- switch r.Method {
- case "GET":
- query = r.URL.RawQuery
- case "POST":
+ r.Form = make(map[string][]string)
+ if r.URL != nil {
+ err = parseQuery(r.Form, r.URL.RawQuery)
+ }
+ if r.Method == "POST" {
if r.Body == nil {
- r.Form = make(map[string][]string)
return os.ErrorString("missing form body")
}
ct := r.Header["Content-Type"]
switch strings.Split(ct, ";", 2)[0] {
case "text/plain", "application/x-www-form-urlencoded", "":
- var b []byte
- if b, err = ioutil.ReadAll(r.Body); err != nil {
- r.Form = make(map[string][]string)
- return err
+ b, e := ioutil.ReadAll(r.Body)
+ if e != nil {
+ if err == nil {
+ err = e
+ }
+ break
+ }
+ e = parseQuery(r.Form, string(b))
+ if err == nil {
+ err = e
}
- query = string(b)
// TODO(dsymonds): Handle multipart/form-data
default:
- r.Form = make(map[string][]string)
return &badStringError{"unknown Content-Type", ct}
}
}
- r.Form, err = ParseQuery(query)
- return
+ return err
}
// FormValue returns the first value for the named component of the query.
@@ -640,3 +680,14 @@ func (r *Request) expectsContinue() bool {
expectation, ok := r.Header["Expect"]
return ok && strings.ToLower(expectation) == "100-continue"
}
+
+func (r *Request) wantsHttp10KeepAlive() bool {
+ if r.ProtoMajor != 1 || r.ProtoMinor != 0 {
+ return false
+ }
+ value, exists := r.Header["Connection"]
+ if !exists {
+ return false
+ }
+ return strings.Contains(strings.ToLower(value), "keep-alive")
+}
diff --git a/src/pkg/http/request_test.go b/src/pkg/http/request_test.go
index 98d5342bb..d25e5e5e7 100644
--- a/src/pkg/http/request_test.go
+++ b/src/pkg/http/request_test.go
@@ -6,6 +6,9 @@ package http
import (
"bytes"
+ "reflect"
+ "regexp"
+ "strings"
"testing"
)
@@ -17,15 +20,15 @@ type parseTest struct {
}
var parseTests = []parseTest{
- parseTest{
+ {
query: "a=1&b=2",
out: stringMultimap{"a": []string{"1"}, "b": []string{"2"}},
},
- parseTest{
+ {
query: "a=1&a=2&a=banana",
out: stringMultimap{"a": []string{"1", "2", "banana"}},
},
- parseTest{
+ {
query: "ascii=%3Ckey%3A+0x90%3E",
out: stringMultimap{"ascii": []string{"<key: 0x90>"}},
},
@@ -68,6 +71,22 @@ func TestQuery(t *testing.T) {
}
}
+func TestPostQuery(t *testing.T) {
+ req := &Request{Method: "POST"}
+ req.URL, _ = ParseURL("http://www.google.com/search?q=foo&q=bar&both=x")
+ req.Header = map[string]string{"Content-Type": "application/x-www-form-urlencoded; boo!"}
+ req.Body = nopCloser{strings.NewReader("z=post&both=y")}
+ if q := req.FormValue("q"); q != "foo" {
+ t.Errorf(`req.FormValue("q") = %q, want "foo"`, q)
+ }
+ if z := req.FormValue("z"); z != "post" {
+ t.Errorf(`req.FormValue("z") = %q, want "post"`, z)
+ }
+ if both := req.Form["both"]; !reflect.DeepEqual(both, []string{"x", "y"}) {
+ t.Errorf(`req.FormValue("both") = %q, want ["x", "y"]`, both)
+ }
+}
+
type stringMap map[string]string
type parseContentTypeTest struct {
contentType stringMap
@@ -75,10 +94,10 @@ type parseContentTypeTest struct {
}
var parseContentTypeTests = []parseContentTypeTest{
- parseContentTypeTest{contentType: stringMap{"Content-Type": "text/plain"}},
- parseContentTypeTest{contentType: stringMap{"Content-Type": ""}},
- parseContentTypeTest{contentType: stringMap{"Content-Type": "text/plain; boundary="}},
- parseContentTypeTest{
+ {contentType: stringMap{"Content-Type": "text/plain"}},
+ {contentType: stringMap{"Content-Type": ""}},
+ {contentType: stringMap{"Content-Type": "text/plain; boundary="}},
+ {
contentType: stringMap{"Content-Type": "application/unknown"},
error: true,
},
@@ -101,17 +120,36 @@ func TestPostContentTypeParsing(t *testing.T) {
}
}
+func TestMultipartReader(t *testing.T) {
+ req := &Request{
+ Method: "POST",
+ Header: stringMap{"Content-Type": `multipart/form-data; boundary="foo123"`},
+ Body: nopCloser{new(bytes.Buffer)},
+ }
+ multipart, err := req.MultipartReader()
+ if multipart == nil {
+ t.Errorf("expected multipart; error: %v", err)
+ }
+
+ req.Header = stringMap{"Content-Type": "text/plain"}
+ multipart, err = req.MultipartReader()
+ if multipart != nil {
+ t.Errorf("unexpected multipart for text/plain")
+ }
+}
+
func TestRedirect(t *testing.T) {
const (
- start = "http://codesearch.google.com/"
- end = "http://www.google.com/codesearch"
+ start = "http://google.com/"
+ endRe = "^http://www\\.google\\.[a-z.]+/$"
)
+ var end = regexp.MustCompile(endRe)
r, url, err := Get(start)
if err != nil {
t.Fatal(err)
}
r.Body.Close()
- if r.StatusCode != 200 || url != end {
- t.Fatalf("Get(%s) got status %d at %s, want 200 at %s", start, r.StatusCode, url, end)
+ if r.StatusCode != 200 || !end.MatchString(url) {
+ t.Fatalf("Get(%s) got status %d at %q, want 200 matching %q", start, r.StatusCode, url, endRe)
}
}
diff --git a/src/pkg/http/requestwrite_test.go b/src/pkg/http/requestwrite_test.go
index 469df69d7..3ceabe4ee 100644
--- a/src/pkg/http/requestwrite_test.go
+++ b/src/pkg/http/requestwrite_test.go
@@ -16,20 +16,20 @@ type reqWriteTest struct {
var reqWriteTests = []reqWriteTest{
// HTTP/1.1 => chunked coding; no body; no trailer
- reqWriteTest{
+ {
Request{
Method: "GET",
RawURL: "http://www.techcrunch.com/",
URL: &URL{
- Raw: "http://www.techcrunch.com/",
- Scheme: "http",
- RawPath: "http://www.techcrunch.com/",
- Authority: "www.techcrunch.com",
- Userinfo: "",
- Host: "www.techcrunch.com",
- Path: "/",
- RawQuery: "",
- Fragment: "",
+ Raw: "http://www.techcrunch.com/",
+ Scheme: "http",
+ RawPath: "http://www.techcrunch.com/",
+ RawAuthority: "www.techcrunch.com",
+ RawUserinfo: "",
+ Host: "www.techcrunch.com",
+ Path: "/",
+ RawQuery: "",
+ Fragment: "",
},
Proto: "HTTP/1.1",
ProtoMajor: 1,
@@ -61,7 +61,7 @@ var reqWriteTests = []reqWriteTest{
"Proxy-Connection: keep-alive\r\n\r\n",
},
// HTTP/1.1 => chunked coding; body; empty trailer
- reqWriteTest{
+ {
Request{
Method: "GET",
URL: &URL{
@@ -83,7 +83,7 @@ var reqWriteTests = []reqWriteTest{
"6\r\nabcdef\r\n0\r\n\r\n",
},
// HTTP/1.1 POST => chunked coding; body; empty trailer
- reqWriteTest{
+ {
Request{
Method: "POST",
URL: &URL{
@@ -107,7 +107,7 @@ var reqWriteTests = []reqWriteTest{
"6\r\nabcdef\r\n0\r\n\r\n",
},
// default to HTTP/1.1
- reqWriteTest{
+ {
Request{
Method: "GET",
RawURL: "/search",
diff --git a/src/pkg/http/response.go b/src/pkg/http/response.go
index 6a209c9f8..a24726110 100644
--- a/src/pkg/http/response.go
+++ b/src/pkg/http/response.go
@@ -86,10 +86,14 @@ func ReadResponse(r *bufio.Reader, requestMethod string) (resp *Response, err os
return nil, err
}
f := strings.Split(line, " ", 3)
- if len(f) < 3 {
+ if len(f) < 2 {
return nil, &badStringError{"malformed HTTP response", line}
}
- resp.Status = f[1] + " " + f[2]
+ reasonPhrase := ""
+ if len(f) > 2 {
+ reasonPhrase = f[2]
+ }
+ resp.Status = f[1] + " " + reasonPhrase
resp.StatusCode, err = strconv.Atoi(f[1])
if err != nil {
return nil, &badStringError{"malformed HTTP status code", f[1]}
diff --git a/src/pkg/http/response_test.go b/src/pkg/http/response_test.go
index 889b770be..89a8c3b44 100644
--- a/src/pkg/http/response_test.go
+++ b/src/pkg/http/response_test.go
@@ -21,7 +21,7 @@ type respTest struct {
var respTests = []respTest{
// Unchunked response without Content-Length.
- respTest{
+ {
"HTTP/1.0 200 OK\r\n" +
"Connection: close\r\n" +
"\r\n" +
@@ -45,7 +45,7 @@ var respTests = []respTest{
},
// Unchunked response with Content-Length.
- respTest{
+ {
"HTTP/1.0 200 OK\r\n" +
"Content-Length: 10\r\n" +
"Connection: close\r\n" +
@@ -71,7 +71,7 @@ var respTests = []respTest{
},
// Chunked response without Content-Length.
- respTest{
+ {
"HTTP/1.0 200 OK\r\n" +
"Transfer-Encoding: chunked\r\n" +
"\r\n" +
@@ -97,7 +97,7 @@ var respTests = []respTest{
},
// Chunked response with Content-Length.
- respTest{
+ {
"HTTP/1.0 200 OK\r\n" +
"Transfer-Encoding: chunked\r\n" +
"Content-Length: 10\r\n" +
@@ -122,6 +122,44 @@ var respTests = []respTest{
"Body here\n",
},
+
+ // Status line without a Reason-Phrase, but trailing space.
+ // (permitted by RFC 2616)
+ {
+ "HTTP/1.0 303 \r\n\r\n",
+ Response{
+ Status: "303 ",
+ StatusCode: 303,
+ Proto: "HTTP/1.0",
+ ProtoMajor: 1,
+ ProtoMinor: 0,
+ RequestMethod: "GET",
+ Header: map[string]string{},
+ Close: true,
+ ContentLength: -1,
+ },
+
+ "",
+ },
+
+ // Status line without a Reason-Phrase, and no trailing space.
+ // (not permitted by RFC 2616, but we'll accept it anyway)
+ {
+ "HTTP/1.0 303\r\n\r\n",
+ Response{
+ Status: "303 ",
+ StatusCode: 303,
+ Proto: "HTTP/1.0",
+ ProtoMajor: 1,
+ ProtoMinor: 0,
+ RequestMethod: "GET",
+ Header: map[string]string{},
+ Close: true,
+ ContentLength: -1,
+ },
+
+ "",
+ },
}
func TestReadResponse(t *testing.T) {
diff --git a/src/pkg/http/responsewrite_test.go b/src/pkg/http/responsewrite_test.go
index 768064303..9f10be562 100644
--- a/src/pkg/http/responsewrite_test.go
+++ b/src/pkg/http/responsewrite_test.go
@@ -16,7 +16,7 @@ type respWriteTest struct {
var respWriteTests = []respWriteTest{
// HTTP/1.0, identity coding; no trailer
- respWriteTest{
+ {
Response{
StatusCode: 503,
ProtoMajor: 1,
@@ -31,8 +31,23 @@ var respWriteTests = []respWriteTest{
"Content-Length: 6\r\n\r\n" +
"abcdef",
},
+ // Unchunked response without Content-Length.
+ {
+ Response{
+ StatusCode: 200,
+ ProtoMajor: 1,
+ ProtoMinor: 0,
+ RequestMethod: "GET",
+ Header: map[string]string{},
+ Body: nopCloser{bytes.NewBufferString("abcdef")},
+ ContentLength: -1,
+ },
+ "HTTP/1.0 200 OK\r\n" +
+ "\r\n" +
+ "abcdef",
+ },
// HTTP/1.1, chunked coding; empty trailer; close
- respWriteTest{
+ {
Response{
StatusCode: 200,
ProtoMajor: 1,
diff --git a/src/pkg/http/serve_test.go b/src/pkg/http/serve_test.go
new file mode 100644
index 000000000..43e1b93a5
--- /dev/null
+++ b/src/pkg/http/serve_test.go
@@ -0,0 +1,135 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// End-to-end serving tests
+
+package http
+
+import (
+ "bytes"
+ "os"
+ "net"
+ "testing"
+)
+
+type dummyAddr string
+type oneConnListener struct {
+ conn net.Conn
+}
+
+func (l *oneConnListener) Accept() (c net.Conn, err os.Error) {
+ c = l.conn
+ if c == nil {
+ err = os.EOF
+ return
+ }
+ err = nil
+ l.conn = nil
+ return
+}
+
+func (l *oneConnListener) Close() os.Error {
+ return nil
+}
+
+func (l *oneConnListener) Addr() net.Addr {
+ return dummyAddr("test-address")
+}
+
+func (a dummyAddr) Network() string {
+ return string(a)
+}
+
+func (a dummyAddr) String() string {
+ return string(a)
+}
+
+type testConn struct {
+ readBuf bytes.Buffer
+ writeBuf bytes.Buffer
+}
+
+func (c *testConn) Read(b []byte) (int, os.Error) {
+ return c.readBuf.Read(b)
+}
+
+func (c *testConn) Write(b []byte) (int, os.Error) {
+ return c.writeBuf.Write(b)
+}
+
+func (c *testConn) Close() os.Error {
+ return nil
+}
+
+func (c *testConn) LocalAddr() net.Addr {
+ return dummyAddr("local-addr")
+}
+
+func (c *testConn) RemoteAddr() net.Addr {
+ return dummyAddr("remote-addr")
+}
+
+func (c *testConn) SetTimeout(nsec int64) os.Error {
+ return nil
+}
+
+func (c *testConn) SetReadTimeout(nsec int64) os.Error {
+ return nil
+}
+
+func (c *testConn) SetWriteTimeout(nsec int64) os.Error {
+ return nil
+}
+
+func TestConsumingBodyOnNextConn(t *testing.T) {
+ conn := new(testConn)
+ for i := 0; i < 2; i++ {
+ conn.readBuf.Write([]byte(
+ "POST / HTTP/1.1\r\n" +
+ "Host: test\r\n" +
+ "Content-Length: 11\r\n" +
+ "\r\n" +
+ "foo=1&bar=1"))
+ }
+
+ reqNum := 0
+ ch := make(chan *Request)
+ servech := make(chan os.Error)
+ listener := &oneConnListener{conn}
+ handler := func(res ResponseWriter, req *Request) {
+ reqNum++
+ t.Logf("Got request #%d: %v", reqNum, req)
+ ch <- req
+ }
+
+ go func() {
+ servech <- Serve(listener, HandlerFunc(handler))
+ }()
+
+ var req *Request
+ t.Log("Waiting for first request.")
+ req = <-ch
+ if req == nil {
+ t.Fatal("Got nil first request.")
+ }
+ if req.Method != "POST" {
+ t.Errorf("For request #1's method, got %q; expected %q",
+ req.Method, "POST")
+ }
+
+ t.Log("Waiting for second request.")
+ req = <-ch
+ if req == nil {
+ t.Fatal("Got nil second request.")
+ }
+ if req.Method != "POST" {
+ t.Errorf("For request #2's method, got %q; expected %q",
+ req.Method, "POST")
+ }
+
+ t.Log("Waiting for EOF.")
+ if serveerr := <-servech; serveerr != os.EOF {
+ t.Errorf("Serve returned %q; expected EOF", serveerr)
+ }
+}
diff --git a/src/pkg/http/server.go b/src/pkg/http/server.go
index 81ce98229..b8783da28 100644
--- a/src/pkg/http/server.go
+++ b/src/pkg/http/server.go
@@ -13,6 +13,8 @@ package http
import (
"bufio"
+ "crypto/rand"
+ "crypto/tls"
"fmt"
"io"
"log"
@@ -21,6 +23,7 @@ import (
"path"
"strconv"
"strings"
+ "time"
)
// Errors introduced by the HTTP server.
@@ -34,42 +37,93 @@ var (
// registered to serve a particular path or subtree
// in the HTTP server.
//
-// ServeHTTP should write reply headers and data to the Conn
+// ServeHTTP should write reply headers and data to the ResponseWriter
// and then return. Returning signals that the request is finished
// and that the HTTP server can move on to the next request on
// the connection.
type Handler interface {
- ServeHTTP(*Conn, *Request)
+ ServeHTTP(ResponseWriter, *Request)
}
-// A Conn represents the server side of a single active HTTP connection.
-type Conn struct {
- RemoteAddr string // network address of remote side
- Req *Request // current HTTP request
+// A ResponseWriter interface is used by an HTTP handler to
+// construct an HTTP response.
+type ResponseWriter interface {
+ // RemoteAddr returns the address of the client that sent the current request
+ RemoteAddr() string
+
+ // UsingTLS returns true if the client is connected using TLS
+ UsingTLS() bool
+
+ // SetHeader sets a header line in the eventual response.
+ // For example, SetHeader("Content-Type", "text/html; charset=utf-8")
+ // will result in the header line
+ //
+ // Content-Type: text/html; charset=utf-8
+ //
+ // being sent. UTF-8 encoded HTML is the default setting for
+ // Content-Type in this library, so users need not make that
+ // particular call. Calls to SetHeader after WriteHeader (or Write)
+ // are ignored.
+ SetHeader(string, string)
+
+ // Write writes the data to the connection as part of an HTTP reply.
+ // If WriteHeader has not yet been called, Write calls WriteHeader(http.StatusOK)
+ // before writing the data.
+ Write([]byte) (int, os.Error)
+
+ // WriteHeader sends an HTTP response header with status code.
+ // If WriteHeader is not called explicitly, the first call to Write
+ // will trigger an implicit WriteHeader(http.StatusOK).
+ // Thus explicit calls to WriteHeader are mainly used to
+ // send error codes.
+ WriteHeader(int)
+
+ // Flush sends any buffered data to the client.
+ Flush()
+
+ // Hijack lets the caller take over the connection.
+ // After a call to Hijack(), the HTTP server library
+ // will not do anything else with the connection.
+ // It becomes the caller's responsibility to manage
+ // and close the connection.
+ Hijack() (io.ReadWriteCloser, *bufio.ReadWriter, os.Error)
+}
- rwc io.ReadWriteCloser // i/o connection
- buf *bufio.ReadWriter // buffered rwc
- handler Handler // request handler
- hijacked bool // connection has been hijacked by handler
+// A conn represents the server side of an HTTP connection.
+type conn struct {
+ remoteAddr string // network address of remote side
+ handler Handler // request handler
+ rwc io.ReadWriteCloser // i/o connection
+ buf *bufio.ReadWriter // buffered rwc
+ hijacked bool // connection has been hijacked by handler
+ usingTLS bool // a flag indicating connection over TLS
+}
- // state for the current reply
- closeAfterReply bool // close connection after this reply
- chunking bool // using chunked transfer encoding for reply body
- wroteHeader bool // reply header has been written
- wroteContinue bool // 100 Continue response was written
- header map[string]string // reply header parameters
- written int64 // number of bytes written in body
- status int // status code passed to WriteHeader
+// A response represents the server side of an HTTP response.
+type response struct {
+ conn *conn
+ req *Request // request for this response
+ chunking bool // using chunked transfer encoding for reply body
+ wroteHeader bool // reply header has been written
+ wroteContinue bool // 100 Continue response was written
+ header map[string]string // reply header parameters
+ written int64 // number of bytes written in body
+ status int // status code passed to WriteHeader
+
+ // close connection after this reply. set on request and
+ // updated after response from handler if there's a
+ // "Connection: keep-alive" response header and a
+ // Content-Length.
+ closeAfterReply bool
}
// Create new connection from rwc.
-func newConn(rwc net.Conn, handler Handler) (c *Conn, err os.Error) {
- c = new(Conn)
- if a := rwc.RemoteAddr(); a != nil {
- c.RemoteAddr = a.String()
- }
+func newConn(rwc net.Conn, handler Handler) (c *conn, err os.Error) {
+ c = new(conn)
+ c.remoteAddr = rwc.RemoteAddr().String()
c.handler = handler
c.rwc = rwc
+ _, c.usingTLS = rwc.(*tls.Conn)
br := bufio.NewReader(rwc)
bw := bufio.NewWriter(rwc)
c.buf = bufio.NewReadWriter(br, bw)
@@ -79,17 +133,15 @@ func newConn(rwc net.Conn, handler Handler) (c *Conn, err os.Error) {
// wrapper around io.ReaderCloser which on first read, sends an
// HTTP/1.1 100 Continue header
type expectContinueReader struct {
- conn *Conn
+ resp *response
readCloser io.ReadCloser
}
func (ecr *expectContinueReader) Read(p []byte) (n int, err os.Error) {
- if !ecr.conn.wroteContinue && !ecr.conn.hijacked {
- ecr.conn.wroteContinue = true
- if ecr.conn.Req.ProtoAtLeast(1, 1) {
- io.WriteString(ecr.conn.buf, "HTTP/1.1 100 Continue\r\n\r\n")
- ecr.conn.buf.Flush()
- }
+ if !ecr.resp.wroteContinue && !ecr.resp.conn.hijacked {
+ ecr.resp.wroteContinue = true
+ io.WriteString(ecr.resp.conn.buf, "HTTP/1.1 100 Continue\r\n\r\n")
+ ecr.resp.conn.buf.Flush()
}
return ecr.readCloser.Read(p)
}
@@ -98,88 +150,88 @@ func (ecr *expectContinueReader) Close() os.Error {
return ecr.readCloser.Close()
}
+// TimeFormat is the time format to use with
+// time.Parse and time.Time.Format when parsing
+// or generating times in HTTP headers.
+// It is like time.RFC1123 but hard codes GMT as the time zone.
+const TimeFormat = "Mon, 02 Jan 2006 15:04:05 GMT"
+
// Read next request from connection.
-func (c *Conn) readRequest() (req *Request, err os.Error) {
+func (c *conn) readRequest() (w *response, err os.Error) {
if c.hijacked {
return nil, ErrHijacked
}
+ var req *Request
if req, err = ReadRequest(c.buf.Reader); err != nil {
return nil, err
}
- // Reset per-request connection state.
- c.header = make(map[string]string)
- c.wroteHeader = false
- c.wroteContinue = false
- c.Req = req
+ w = new(response)
+ w.conn = c
+ w.req = req
+ w.header = make(map[string]string)
// Expect 100 Continue support
- if req.expectsContinue() {
+ if req.expectsContinue() && req.ProtoAtLeast(1, 1) {
// Wrap the Body reader with one that replies on the connection
- req.Body = &expectContinueReader{readCloser: req.Body, conn: c}
+ req.Body = &expectContinueReader{readCloser: req.Body, resp: w}
}
// Default output is HTML encoded in UTF-8.
- c.SetHeader("Content-Type", "text/html; charset=utf-8")
+ w.SetHeader("Content-Type", "text/html; charset=utf-8")
+ w.SetHeader("Date", time.UTC().Format(TimeFormat))
if req.ProtoAtLeast(1, 1) {
// HTTP/1.1 or greater: use chunked transfer encoding
// to avoid closing the connection at EOF.
- c.chunking = true
- c.SetHeader("Transfer-Encoding", "chunked")
+ w.chunking = true
+ w.SetHeader("Transfer-Encoding", "chunked")
} else {
// HTTP version < 1.1: cannot do chunked transfer
// encoding, so signal EOF by closing connection.
- // Could avoid closing the connection if there is
- // a Content-Length: header in the response,
- // but everyone who expects persistent connections
- // does HTTP/1.1 now.
- c.closeAfterReply = true
- c.chunking = false
+ // Will be overridden if the HTTP handler ends up
+ // writing a Content-Length and the client requested
+ // "Connection: keep-alive"
+ w.closeAfterReply = true
}
- return req, nil
+ return w, nil
}
-// SetHeader sets a header line in the eventual reply.
-// For example, SetHeader("Content-Type", "text/html; charset=utf-8")
-// will result in the header line
-//
-// Content-Type: text/html; charset=utf-8
-//
-// being sent. UTF-8 encoded HTML is the default setting for
-// Content-Type in this library, so users need not make that
-// particular call. Calls to SetHeader after WriteHeader (or Write)
-// are ignored.
-func (c *Conn) SetHeader(hdr, val string) { c.header[CanonicalHeaderKey(hdr)] = val }
-
-// WriteHeader sends an HTTP response header with status code.
-// If WriteHeader is not called explicitly, the first call to Write
-// will trigger an implicit WriteHeader(http.StatusOK).
-// Thus explicit calls to WriteHeader are mainly used to
-// send error codes.
-func (c *Conn) WriteHeader(code int) {
- if c.hijacked {
- log.Stderr("http: Conn.WriteHeader on hijacked connection")
+// UsingTLS implements the ResponseWriter.UsingTLS method
+func (w *response) UsingTLS() bool {
+ return w.conn.usingTLS
+}
+
+// RemoteAddr implements the ResponseWriter.RemoteAddr method
+func (w *response) RemoteAddr() string { return w.conn.remoteAddr }
+
+// SetHeader implements the ResponseWriter.SetHeader method
+func (w *response) SetHeader(hdr, val string) { w.header[CanonicalHeaderKey(hdr)] = val }
+
+// WriteHeader implements the ResponseWriter.WriteHeader method
+func (w *response) WriteHeader(code int) {
+ if w.conn.hijacked {
+ log.Print("http: response.WriteHeader on hijacked connection")
return
}
- if c.wroteHeader {
- log.Stderr("http: multiple Conn.WriteHeader calls")
+ if w.wroteHeader {
+ log.Print("http: multiple response.WriteHeader calls")
return
}
- c.wroteHeader = true
- c.status = code
+ w.wroteHeader = true
+ w.status = code
if code == StatusNotModified {
// Must not have body.
- c.header["Content-Type"] = "", false
- c.header["Transfer-Encoding"] = "", false
+ w.header["Content-Type"] = "", false
+ w.header["Transfer-Encoding"] = "", false
+ w.chunking = false
}
- c.written = 0
- if !c.Req.ProtoAtLeast(1, 0) {
+ if !w.req.ProtoAtLeast(1, 0) {
return
}
proto := "HTTP/1.0"
- if c.Req.ProtoAtLeast(1, 1) {
+ if w.req.ProtoAtLeast(1, 1) {
proto = "HTTP/1.1"
}
codestring := strconv.Itoa(code)
@@ -187,48 +239,55 @@ func (c *Conn) WriteHeader(code int) {
if !ok {
text = "status code " + codestring
}
- io.WriteString(c.buf, proto+" "+codestring+" "+text+"\r\n")
- for k, v := range c.header {
- io.WriteString(c.buf, k+": "+v+"\r\n")
+ io.WriteString(w.conn.buf, proto+" "+codestring+" "+text+"\r\n")
+ for k, v := range w.header {
+ io.WriteString(w.conn.buf, k+": "+v+"\r\n")
}
- io.WriteString(c.buf, "\r\n")
+ io.WriteString(w.conn.buf, "\r\n")
}
-// Write writes the data to the connection as part of an HTTP reply.
-// If WriteHeader has not yet been called, Write calls WriteHeader(http.StatusOK)
-// before writing the data.
-func (c *Conn) Write(data []byte) (n int, err os.Error) {
- if c.hijacked {
- log.Stderr("http: Conn.Write on hijacked connection")
+// Write implements the ResponseWriter.Write method
+func (w *response) Write(data []byte) (n int, err os.Error) {
+ if w.conn.hijacked {
+ log.Print("http: response.Write on hijacked connection")
return 0, ErrHijacked
}
- if !c.wroteHeader {
- c.WriteHeader(StatusOK)
+ if !w.wroteHeader {
+ if w.req.wantsHttp10KeepAlive() {
+ _, hasLength := w.header["Content-Length"]
+ if hasLength {
+ _, connectionHeaderSet := w.header["Connection"]
+ if !connectionHeaderSet {
+ w.header["Connection"] = "keep-alive"
+ }
+ }
+ }
+ w.WriteHeader(StatusOK)
}
if len(data) == 0 {
return 0, nil
}
- if c.status == StatusNotModified {
+ if w.status == StatusNotModified {
// Must not have body.
return 0, ErrBodyNotAllowed
}
- c.written += int64(len(data)) // ignoring errors, for errorKludge
+ w.written += int64(len(data)) // ignoring errors, for errorKludge
// TODO(rsc): if chunking happened after the buffering,
// then there would be fewer chunk headers.
// On the other hand, it would make hijacking more difficult.
- if c.chunking {
- fmt.Fprintf(c.buf, "%x\r\n", len(data)) // TODO(rsc): use strconv not fmt
+ if w.chunking {
+ fmt.Fprintf(w.conn.buf, "%x\r\n", len(data)) // TODO(rsc): use strconv not fmt
}
- n, err = c.buf.Write(data)
- if err == nil && c.chunking {
+ n, err = w.conn.buf.Write(data)
+ if err == nil && w.chunking {
if n != len(data) {
err = io.ErrShortWrite
}
if err == nil {
- io.WriteString(c.buf, "\r\n")
+ io.WriteString(w.conn.buf, "\r\n")
}
}
@@ -242,25 +301,25 @@ func (c *Conn) Write(data []byte) (n int, err os.Error) {
// long enough. The minimum lengths used in those
// browsers are in the 256-512 range.
// Pad to 1024 bytes.
-func errorKludge(c *Conn, req *Request) {
+func errorKludge(w *response) {
const min = 1024
// Is this an error?
- if kind := c.status / 100; kind != 4 && kind != 5 {
+ if kind := w.status / 100; kind != 4 && kind != 5 {
return
}
// Did the handler supply any info? Enough?
- if c.written == 0 || c.written >= min {
+ if w.written == 0 || w.written >= min {
return
}
// Is it a broken browser?
var msg string
- switch agent := req.UserAgent; {
- case strings.Index(agent, "MSIE") >= 0:
+ switch agent := w.req.UserAgent; {
+ case strings.Contains(agent, "MSIE"):
msg = "Internet Explorer"
- case strings.Index(agent, "Chrome/") >= 0:
+ case strings.Contains(agent, "Chrome/"):
msg = "Chrome"
default:
return
@@ -268,45 +327,54 @@ func errorKludge(c *Conn, req *Request) {
msg += " would ignore this error page if this text weren't here.\n"
// Is it text? ("Content-Type" is always in the map)
- baseType := strings.Split(c.header["Content-Type"], ";", 2)[0]
+ baseType := strings.Split(w.header["Content-Type"], ";", 2)[0]
switch baseType {
case "text/html":
- io.WriteString(c, "<!-- ")
- for c.written < min {
- io.WriteString(c, msg)
+ io.WriteString(w, "<!-- ")
+ for w.written < min {
+ io.WriteString(w, msg)
}
- io.WriteString(c, " -->")
+ io.WriteString(w, " -->")
case "text/plain":
- io.WriteString(c, "\n")
- for c.written < min {
- io.WriteString(c, msg)
+ io.WriteString(w, "\n")
+ for w.written < min {
+ io.WriteString(w, msg)
}
}
}
-func (c *Conn) finishRequest() {
- if !c.wroteHeader {
- c.WriteHeader(StatusOK)
+func (w *response) finishRequest() {
+ // If this was an HTTP/1.0 request with keep-alive and we sent a Content-Length
+ // back, we can make this a keep-alive response ...
+ if w.req.wantsHttp10KeepAlive() {
+ _, sentLength := w.header["Content-Length"]
+ if sentLength && w.header["Connection"] == "keep-alive" {
+ w.closeAfterReply = false
+ }
+ }
+ if !w.wroteHeader {
+ w.WriteHeader(StatusOK)
}
- errorKludge(c, c.Req)
- if c.chunking {
- io.WriteString(c.buf, "0\r\n")
+ errorKludge(w)
+ if w.chunking {
+ io.WriteString(w.conn.buf, "0\r\n")
// trailer key/value pairs, followed by blank line
- io.WriteString(c.buf, "\r\n")
+ io.WriteString(w.conn.buf, "\r\n")
}
- c.buf.Flush()
+ w.conn.buf.Flush()
+ w.req.Body.Close()
}
-// Flush sends any buffered data to the client.
-func (c *Conn) Flush() {
- if !c.wroteHeader {
- c.WriteHeader(StatusOK)
+// Flush implements the ResponseWriter.Flush method.
+func (w *response) Flush() {
+ if !w.wroteHeader {
+ w.WriteHeader(StatusOK)
}
- c.buf.Flush()
+ w.conn.buf.Flush()
}
// Close the connection.
-func (c *Conn) close() {
+func (c *conn) close() {
if c.buf != nil {
c.buf.Flush()
c.buf = nil
@@ -318,41 +386,39 @@ func (c *Conn) close() {
}
// Serve a new connection.
-func (c *Conn) serve() {
+func (c *conn) serve() {
for {
- req, err := c.readRequest()
+ w, err := c.readRequest()
if err != nil {
break
}
- // HTTP cannot have multiple simultaneous active requests.
+ // HTTP cannot have multiple simultaneous active requests.[*]
// Until the server replies to this request, it can't read another,
// so we might as well run the handler in this goroutine.
- c.handler.ServeHTTP(c, req)
+ // [*] Not strictly true: HTTP pipelining. We could let them all process
+ // in parallel even if their responses need to be serialized.
+ c.handler.ServeHTTP(w, w.req)
if c.hijacked {
return
}
- c.finishRequest()
- if c.closeAfterReply {
+ w.finishRequest()
+ if w.closeAfterReply {
break
}
}
c.close()
}
-// Hijack lets the caller take over the connection.
-// After a call to c.Hijack(), the HTTP server library
-// will not do anything else with the connection.
-// It becomes the caller's responsibility to manage
-// and close the connection.
-func (c *Conn) Hijack() (rwc io.ReadWriteCloser, buf *bufio.ReadWriter, err os.Error) {
- if c.hijacked {
+// Hijack implements the ResponseWriter.Hijack method.
+func (w *response) Hijack() (rwc io.ReadWriteCloser, buf *bufio.ReadWriter, err os.Error) {
+ if w.conn.hijacked {
return nil, nil, ErrHijacked
}
- c.hijacked = true
- rwc = c.rwc
- buf = c.buf
- c.rwc = nil
- c.buf = nil
+ w.conn.hijacked = true
+ rwc = w.conn.rwc
+ buf = w.conn.buf
+ w.conn.rwc = nil
+ w.conn.buf = nil
return
}
@@ -360,24 +426,24 @@ func (c *Conn) Hijack() (rwc io.ReadWriteCloser, buf *bufio.ReadWriter, err os.E
// ordinary functions as HTTP handlers. If f is a function
// with the appropriate signature, HandlerFunc(f) is a
// Handler object that calls f.
-type HandlerFunc func(*Conn, *Request)
+type HandlerFunc func(ResponseWriter, *Request)
-// ServeHTTP calls f(c, req).
-func (f HandlerFunc) ServeHTTP(c *Conn, req *Request) {
- f(c, req)
+// ServeHTTP calls f(w, req).
+func (f HandlerFunc) ServeHTTP(w ResponseWriter, r *Request) {
+ f(w, r)
}
// Helper handlers
// Error replies to the request with the specified error message and HTTP code.
-func Error(c *Conn, error string, code int) {
- c.SetHeader("Content-Type", "text/plain; charset=utf-8")
- c.WriteHeader(code)
- fmt.Fprintln(c, error)
+func Error(w ResponseWriter, error string, code int) {
+ w.SetHeader("Content-Type", "text/plain; charset=utf-8")
+ w.WriteHeader(code)
+ fmt.Fprintln(w, error)
}
// NotFound replies to the request with an HTTP 404 not found error.
-func NotFound(c *Conn, req *Request) { Error(c, "404 page not found", StatusNotFound) }
+func NotFound(w ResponseWriter, r *Request) { Error(w, "404 page not found", StatusNotFound) }
// NotFoundHandler returns a simple request handler
// that replies to each request with a ``404 page not found'' reply.
@@ -385,59 +451,64 @@ func NotFoundHandler() Handler { return HandlerFunc(NotFound) }
// Redirect replies to the request with a redirect to url,
// which may be a path relative to the request path.
-func Redirect(c *Conn, url string, code int) {
- // RFC2616 recommends that a short note "SHOULD" be included in the
- // response because older user agents may not understand 301/307.
- note := "<a href=\"%v\">" + statusText[code] + "</a>.\n"
- if c.Req.Method == "POST" {
- note = ""
+func Redirect(w ResponseWriter, r *Request, url string, code int) {
+ if u, err := ParseURL(url); err == nil {
+ // If url was relative, make absolute by
+ // combining with request path.
+ // The browser would probably do this for us,
+ // but doing it ourselves is more reliable.
+
+ // NOTE(rsc): RFC 2616 says that the Location
+ // line must be an absolute URI, like
+ // "http://www.google.com/redirect/",
+ // not a path like "/redirect/".
+ // Unfortunately, we don't know what to
+ // put in the host name section to get the
+ // client to connect to us again, so we can't
+ // know the right absolute URI to send back.
+ // Because of this problem, no one pays attention
+ // to the RFC; they all send back just a new path.
+ // So do we.
+ oldpath := r.URL.Path
+ if oldpath == "" { // should not happen, but avoid a crash if it does
+ oldpath = "/"
+ }
+ if u.Scheme == "" {
+ // no leading http://server
+ if url == "" || url[0] != '/' {
+ // make relative path absolute
+ olddir, _ := path.Split(oldpath)
+ url = olddir + url
+ }
+
+ // clean up but preserve trailing slash
+ trailing := url[len(url)-1] == '/'
+ url = path.Clean(url)
+ if trailing && url[len(url)-1] != '/' {
+ url += "/"
+ }
+ }
}
- u, err := ParseURL(url)
- if err != nil {
- goto finish
- }
-
- // If url was relative, make absolute by
- // combining with request path.
- // The browser would probably do this for us,
- // but doing it ourselves is more reliable.
-
- // NOTE(rsc): RFC 2616 says that the Location
- // line must be an absolute URI, like
- // "http://www.google.com/redirect/",
- // not a path like "/redirect/".
- // Unfortunately, we don't know what to
- // put in the host name section to get the
- // client to connect to us again, so we can't
- // know the right absolute URI to send back.
- // Because of this problem, no one pays attention
- // to the RFC; they all send back just a new path.
- // So do we.
- oldpath := c.Req.URL.Path
- if oldpath == "" { // should not happen, but avoid a crash if it does
- oldpath = "/"
- }
- if u.Scheme == "" {
- // no leading http://server
- if url == "" || url[0] != '/' {
- // make relative path absolute
- olddir, _ := path.Split(oldpath)
- url = olddir + url
- }
+ w.SetHeader("Location", url)
+ w.WriteHeader(code)
- // clean up but preserve trailing slash
- trailing := url[len(url)-1] == '/'
- url = path.Clean(url)
- if trailing && url[len(url)-1] != '/' {
- url += "/"
- }
+ // RFC2616 recommends that a short note "SHOULD" be included in the
+ // response because older user agents may not understand 301/307.
+ note := "<a href=\"" + htmlEscape(url) + "\">" + statusText[code] + "</a>.\n"
+ if r.Method == "POST" {
+ note = ""
}
+ fmt.Fprintln(w, note)
+}
-finish:
- c.SetHeader("Location", url)
- c.WriteHeader(code)
- fmt.Fprintf(c, note, url)
+func htmlEscape(s string) string {
+ s = strings.Replace(s, "&", "&amp;", -1)
+ s = strings.Replace(s, "<", "&lt;", -1)
+ s = strings.Replace(s, ">", "&gt;", -1)
+ s = strings.Replace(s, "\"", "&quot;", -1)
+ s = strings.Replace(s, "'", "&apos;", -1)
+ return s
}
// Redirect to a fixed URL
@@ -446,8 +517,8 @@ type redirectHandler struct {
code int
}
-func (rh *redirectHandler) ServeHTTP(c *Conn, req *Request) {
- Redirect(c, rh.url, rh.code)
+func (rh *redirectHandler) ServeHTTP(w ResponseWriter, r *Request) {
+ Redirect(w, r, rh.url, rh.code)
}
// RedirectHandler returns a request handler that redirects
@@ -523,11 +594,11 @@ func cleanPath(p string) string {
// ServeHTTP dispatches the request to the handler whose
// pattern most closely matches the request URL.
-func (mux *ServeMux) ServeHTTP(c *Conn, req *Request) {
+func (mux *ServeMux) ServeHTTP(w ResponseWriter, r *Request) {
// Clean path to canonical form and redirect.
- if p := cleanPath(req.URL.Path); p != req.URL.Path {
- c.SetHeader("Location", p)
- c.WriteHeader(StatusMovedPermanently)
+ if p := cleanPath(r.URL.Path); p != r.URL.Path {
+ w.SetHeader("Location", p)
+ w.WriteHeader(StatusMovedPermanently)
return
}
@@ -535,7 +606,7 @@ func (mux *ServeMux) ServeHTTP(c *Conn, req *Request) {
var h Handler
var n = 0
for k, v := range mux.m {
- if !pathMatch(k, req.URL.Path) {
+ if !pathMatch(k, r.URL.Path) {
continue
}
if h == nil || len(k) > n {
@@ -546,7 +617,7 @@ func (mux *ServeMux) ServeHTTP(c *Conn, req *Request) {
if h == nil {
h = NotFoundHandler()
}
- h.ServeHTTP(c, req)
+ h.ServeHTTP(w, r)
}
// Handle registers the handler for the given pattern.
@@ -566,7 +637,7 @@ func (mux *ServeMux) Handle(pattern string, handler Handler) {
}
// HandleFunc registers the handler function for the given pattern.
-func (mux *ServeMux) HandleFunc(pattern string, handler func(*Conn, *Request)) {
+func (mux *ServeMux) HandleFunc(pattern string, handler func(ResponseWriter, *Request)) {
mux.Handle(pattern, HandlerFunc(handler))
}
@@ -576,7 +647,7 @@ func Handle(pattern string, handler Handler) { DefaultServeMux.Handle(pattern, h
// HandleFunc registers the handler function for the given pattern
// in the DefaultServeMux.
-func HandleFunc(pattern string, handler func(*Conn, *Request)) {
+func HandleFunc(pattern string, handler func(ResponseWriter, *Request)) {
DefaultServeMux.HandleFunc(pattern, handler)
}
@@ -618,8 +689,8 @@ func Serve(l net.Listener, handler Handler) os.Error {
// )
//
// // hello world, the web server
-// func HelloServer(c *http.Conn, req *http.Request) {
-// io.WriteString(c, "hello, world!\n")
+// func HelloServer(w http.ResponseWriter, req *http.Request) {
+// io.WriteString(w, "hello, world!\n")
// }
//
// func main() {
@@ -638,3 +709,52 @@ func ListenAndServe(addr string, handler Handler) os.Error {
l.Close()
return e
}
+
+// ListenAndServeTLS acts identically to ListenAndServe, except that it
+// expects HTTPS connections. Additionally, files containing a certificate and
+// matching private key for the server must be provided.
+//
+// A trivial example server is:
+//
+// import (
+// "http"
+// "log"
+// )
+//
+// func handler(w http.ResponseWriter, req *http.Request) {
+// w.SetHeader("Content-Type", "text/plain")
+// w.Write([]byte("This is an example server.\n"))
+// }
+//
+// func main() {
+// http.HandleFunc("/", handler)
+// log.Printf("About to listen on 10443. Go to https://127.0.0.1:10443/")
+// err := http.ListenAndServeTLS(":10443", "cert.pem", "key.pem", nil)
+// if err != nil {
+// log.Exit(err)
+// }
+// }
+//
+// One can use generate_cert.go in crypto/tls to generate cert.pem and key.pem.
+func ListenAndServeTLS(addr string, certFile string, keyFile string, handler Handler) os.Error {
+ config := &tls.Config{
+ Rand: rand.Reader,
+ Time: time.Seconds,
+ NextProtos: []string{"http/1.1"},
+ }
+
+ var err os.Error
+ config.Certificates = make([]tls.Certificate, 1)
+ config.Certificates[0], err = tls.LoadX509KeyPair(certFile, keyFile)
+ if err != nil {
+ return err
+ }
+
+ conn, err := net.Listen("tcp", addr)
+ if err != nil {
+ return err
+ }
+
+ tlsListener := tls.NewListener(conn, config)
+ return Serve(tlsListener, handler)
+}
diff --git a/src/pkg/http/status.go b/src/pkg/http/status.go
index 82a66d7ad..b6e2d65c6 100644
--- a/src/pkg/http/status.go
+++ b/src/pkg/http/status.go
@@ -98,3 +98,9 @@ var statusText = map[int]string{
StatusGatewayTimeout: "Gateway Timeout",
StatusHTTPVersionNotSupported: "HTTP Version Not Supported",
}
+
+// StatusText returns a text for the HTTP status code. It returns the empty
+// string if the code is unknown.
+func StatusText(code int) string {
+ return statusText[code]
+}
diff --git a/src/pkg/http/testdata/file b/src/pkg/http/testdata/file
new file mode 100644
index 000000000..11f11f9be
--- /dev/null
+++ b/src/pkg/http/testdata/file
@@ -0,0 +1 @@
+0123456789
diff --git a/src/pkg/http/transfer.go b/src/pkg/http/transfer.go
index 5e190d74c..e62885d62 100644
--- a/src/pkg/http/transfer.go
+++ b/src/pkg/http/transfer.go
@@ -108,7 +108,7 @@ func (t *transferWriter) WriteHeader(w io.Writer) (err os.Error) {
// writing long headers, using HTTP line splitting
io.WriteString(w, "Trailer: ")
needComma := false
- for k, _ := range t.Trailer {
+ for k := range t.Trailer {
k = CanonicalHeaderKey(k)
switch k {
case "Transfer-Encoding", "Trailer", "Content-Length":
@@ -135,6 +135,8 @@ func (t *transferWriter) WriteBody(w io.Writer) (err os.Error) {
if err == nil {
err = cw.Close()
}
+ } else if t.ContentLength == -1 {
+ _, err = io.Copy(w, t.Body)
} else {
_, err = io.Copy(w, io.LimitReader(t.Body, t.ContentLength))
}
@@ -182,6 +184,7 @@ func readTransfer(msg interface{}, r *bufio.Reader) (err os.Error) {
t.RequestMethod = rr.RequestMethod
t.ProtoMajor = rr.ProtoMajor
t.ProtoMinor = rr.ProtoMinor
+ t.Close = shouldClose(t.ProtoMajor, t.ProtoMinor, t.Header)
case *Request:
t.Header = rr.Header
t.ProtoMajor = rr.ProtoMajor
@@ -208,9 +211,6 @@ func readTransfer(msg interface{}, r *bufio.Reader) (err os.Error) {
return err
}
- // Closing
- t.Close = shouldClose(t.ProtoMajor, t.ProtoMinor, t.Header)
-
// Trailer
t.Trailer, err = fixTrailer(t.Header, t.TransferEncoding)
if err != nil {
@@ -340,7 +340,7 @@ func fixLength(status int, requestMethod string, header map[string]string, te []
// Logic based on media type. The purpose of the following code is just
// to detect whether the unsupported "multipart/byteranges" is being
// used. A proper Content-Type parser is needed in the future.
- if strings.Index(strings.ToLower(header["Content-Type"]), "multipart/byteranges") >= 0 {
+ if strings.Contains(strings.ToLower(header["Content-Type"]), "multipart/byteranges") {
return -1, ErrNotSupported
}
@@ -350,9 +350,20 @@ func fixLength(status int, requestMethod string, header map[string]string, te []
// Determine whether to hang up after sending a request and body, or
// receiving a response and body
+// The 'header' argument contains the request headers.
func shouldClose(major, minor int, header map[string]string) bool {
- if major < 1 || (major == 1 && minor < 1) {
+ if major < 1 {
return true
+ } else if major == 1 && minor == 0 {
+ v, present := header["Connection"]
+ if !present {
+ return true
+ }
+ v = strings.ToLower(v)
+ if !strings.Contains(v, "keep-alive") {
+ return true
+ }
+ return false
} else if v, present := header["Connection"]; present {
// TODO: Should split on commas, toss surrounding white space,
// and check each field.
diff --git a/src/pkg/http/triv.go b/src/pkg/http/triv.go
index 612b6161e..03cfafa7b 100644
--- a/src/pkg/http/triv.go
+++ b/src/pkg/http/triv.go
@@ -20,9 +20,9 @@ import (
// hello world, the web server
var helloRequests = expvar.NewInt("hello-requests")
-func HelloServer(c *http.Conn, req *http.Request) {
+func HelloServer(w http.ResponseWriter, req *http.Request) {
helloRequests.Add(1)
- io.WriteString(c, "hello, world!\n")
+ io.WriteString(w, "hello, world!\n")
}
// Simple counter server. POSTing to it will set the value.
@@ -34,7 +34,7 @@ type Counter struct {
// it directly.
func (ctr *Counter) String() string { return fmt.Sprintf("%d", ctr.n) }
-func (ctr *Counter) ServeHTTP(c *http.Conn, req *http.Request) {
+func (ctr *Counter) ServeHTTP(w http.ResponseWriter, req *http.Request) {
switch req.Method {
case "GET":
ctr.n++
@@ -43,53 +43,34 @@ func (ctr *Counter) ServeHTTP(c *http.Conn, req *http.Request) {
io.Copy(buf, req.Body)
body := buf.String()
if n, err := strconv.Atoi(body); err != nil {
- fmt.Fprintf(c, "bad POST: %v\nbody: [%v]\n", err, body)
+ fmt.Fprintf(w, "bad POST: %v\nbody: [%v]\n", err, body)
} else {
ctr.n = n
- fmt.Fprint(c, "counter reset\n")
+ fmt.Fprint(w, "counter reset\n")
}
}
- fmt.Fprintf(c, "counter = %d\n", ctr.n)
-}
-
-// simple file server
-var webroot = flag.String("root", "/home/rsc", "web root directory")
-var pathVar = expvar.NewMap("file-requests")
-
-func FileServer(c *http.Conn, req *http.Request) {
- c.SetHeader("content-type", "text/plain; charset=utf-8")
- pathVar.Add(req.URL.Path, 1)
- path := *webroot + req.URL.Path // TODO: insecure: use os.CleanName
- f, err := os.Open(path, os.O_RDONLY, 0)
- if err != nil {
- c.WriteHeader(http.StatusNotFound)
- fmt.Fprintf(c, "open %s: %v\n", path, err)
- return
- }
- n, _ := io.Copy(c, f)
- fmt.Fprintf(c, "[%d bytes]\n", n)
- f.Close()
+ fmt.Fprintf(w, "counter = %d\n", ctr.n)
}
// simple flag server
var booleanflag = flag.Bool("boolean", true, "another flag for testing")
-func FlagServer(c *http.Conn, req *http.Request) {
- c.SetHeader("content-type", "text/plain; charset=utf-8")
- fmt.Fprint(c, "Flags:\n")
+func FlagServer(w http.ResponseWriter, req *http.Request) {
+ w.SetHeader("content-type", "text/plain; charset=utf-8")
+ fmt.Fprint(w, "Flags:\n")
flag.VisitAll(func(f *flag.Flag) {
if f.Value.String() != f.DefValue {
- fmt.Fprintf(c, "%s = %s [default = %s]\n", f.Name, f.Value.String(), f.DefValue)
+ fmt.Fprintf(w, "%s = %s [default = %s]\n", f.Name, f.Value.String(), f.DefValue)
} else {
- fmt.Fprintf(c, "%s = %s\n", f.Name, f.Value.String())
+ fmt.Fprintf(w, "%s = %s\n", f.Name, f.Value.String())
}
})
}
// simple argument server
-func ArgServer(c *http.Conn, req *http.Request) {
+func ArgServer(w http.ResponseWriter, req *http.Request) {
for _, s := range os.Args {
- fmt.Fprint(c, s, " ")
+ fmt.Fprint(w, s, " ")
}
}
@@ -106,44 +87,46 @@ func ChanCreate() Chan {
return c
}
-func (ch Chan) ServeHTTP(c *http.Conn, req *http.Request) {
- io.WriteString(c, fmt.Sprintf("channel send #%d\n", <-ch))
+func (ch Chan) ServeHTTP(w http.ResponseWriter, req *http.Request) {
+ io.WriteString(w, fmt.Sprintf("channel send #%d\n", <-ch))
}
// exec a program, redirecting output
-func DateServer(c *http.Conn, req *http.Request) {
- c.SetHeader("content-type", "text/plain; charset=utf-8")
+func DateServer(rw http.ResponseWriter, req *http.Request) {
+ rw.SetHeader("content-type", "text/plain; charset=utf-8")
r, w, err := os.Pipe()
if err != nil {
- fmt.Fprintf(c, "pipe: %s\n", err)
+ fmt.Fprintf(rw, "pipe: %s\n", err)
return
}
pid, err := os.ForkExec("/bin/date", []string{"date"}, os.Environ(), "", []*os.File{nil, w, w})
defer r.Close()
w.Close()
if err != nil {
- fmt.Fprintf(c, "fork/exec: %s\n", err)
+ fmt.Fprintf(rw, "fork/exec: %s\n", err)
return
}
- io.Copy(c, r)
+ io.Copy(rw, r)
wait, err := os.Wait(pid, 0)
if err != nil {
- fmt.Fprintf(c, "wait: %s\n", err)
+ fmt.Fprintf(rw, "wait: %s\n", err)
return
}
if !wait.Exited() || wait.ExitStatus() != 0 {
- fmt.Fprintf(c, "date: %v\n", wait)
+ fmt.Fprintf(rw, "date: %v\n", wait)
return
}
}
-func Logger(c *http.Conn, req *http.Request) {
- log.Stdout(req.URL.Raw)
- c.WriteHeader(404)
- c.Write([]byte("oops"))
+func Logger(w http.ResponseWriter, req *http.Request) {
+ log.Print(req.URL.Raw)
+ w.WriteHeader(404)
+ w.Write([]byte("oops"))
}
+var webroot = flag.String("root", "/home/rsc", "web root directory")
+
func main() {
flag.Parse()
@@ -153,7 +136,7 @@ func main() {
expvar.Publish("counter", ctr)
http.Handle("/", http.HandlerFunc(Logger))
- http.Handle("/go/", http.HandlerFunc(FileServer))
+ http.Handle("/go/", http.FileServer(*webroot, "/go/"))
http.Handle("/flags", http.HandlerFunc(FlagServer))
http.Handle("/args", http.HandlerFunc(ArgServer))
http.Handle("/go/hello", http.HandlerFunc(HelloServer))
@@ -161,6 +144,6 @@ func main() {
http.Handle("/date", http.HandlerFunc(DateServer))
err := http.ListenAndServe(":12345", nil)
if err != nil {
- log.Crash("ListenAndServe: ", err)
+ log.Panicln("ListenAndServe:", err)
}
}
diff --git a/src/pkg/http/url.go b/src/pkg/http/url.go
index 148ada4b2..f0ac4c1df 100644
--- a/src/pkg/http/url.go
+++ b/src/pkg/http/url.go
@@ -46,6 +46,17 @@ func unhex(c byte) byte {
return 0
}
+type encoding int
+
+const (
+ encodePath encoding = 1 + iota
+ encodeUserPassword
+ encodeQueryComponent
+ encodeFragment
+ encodeOpaque
+)
+
+
type URLEscapeError string
func (e URLEscapeError) String() string {
@@ -54,17 +65,53 @@ func (e URLEscapeError) String() string {
// Return true if the specified character should be escaped when
// appearing in a URL string, according to RFC 2396.
-func shouldEscape(c byte) bool {
- if c <= ' ' || c >= 0x7F {
- return true
+// The mode argument selects which section of the URL is being escaped,
+func shouldEscape(c byte, mode encoding) bool {
+ // RFC 2396 §2.3 Unreserved characters (alphanum)
+ if 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' || '0' <= c && c <= '9' {
+ return false
}
switch c {
- case '<', '>', '#', '%', '"', // RFC 2396 delims
- '{', '}', '|', '\\', '^', '[', ']', '`', // RFC2396 unwise
- '?', '&', '=', '+': // RFC 2396 reserved in path
- return true
+ case '-', '_', '.', '!', '~', '*', '\'', '(', ')': // §2.3 Unreserved characters (mark)
+ return false
+
+ case '$', '&', '+', ',', '/', ':', ';', '=', '?', '@': // §2.2 Reserved characters (reserved)
+ // Different sections of the URL allow a few of
+ // the reserved characters to appear unescaped.
+ switch mode {
+ case encodePath: // §3.3
+ // The RFC allows : @ & = + $ , but saves / ; for assigning
+ // meaning to individual path segments. This package
+ // only manipulates the path as a whole, so we allow those
+ // last two as well. Clients that need to distinguish between
+ // `/foo;y=z/bar` and `/foo%3by=z/bar` will have to re-decode RawPath.
+ // That leaves only ? to escape.
+ return c == '?'
+
+ case encodeUserPassword: // §3.2.2
+ // The RFC allows ; : & = + $ , in userinfo, so we must escape only @ and /.
+ // The parsing of userinfo treats : as special so we must escape that too.
+ return c == '@' || c == '/' || c == ':'
+
+ case encodeQueryComponent: // §3.4
+ // The RFC reserves (so we must escape) everything.
+ return true
+
+ case encodeFragment: // §4.1
+ // The RFC text is silent but the grammar allows
+ // everything, so escape nothing.
+ return false
+
+ case encodeOpaque: // §3 opaque_part
+ // The RFC allows opaque_part to use all characters
+ // except that the leading / must be escaped.
+ // (We implement that case in String.)
+ return false
+ }
}
- return false
+
+ // Everything else must be escaped.
+ return true
}
// CanonicalPath applies the algorithm specified in RFC 2396 to
@@ -124,17 +171,19 @@ func CanonicalPath(path string) string {
return string(a)
}
-// URLUnescape unescapes a URL-encoded string,
+// URLUnescape unescapes a string in ``URL encoded'' form,
// converting %AB into the byte 0xAB and '+' into ' ' (space).
// It returns an error if any % is not followed
// by two hexadecimal digits.
-func URLUnescape(s string) (string, os.Error) { return urlUnescape(s, true) }
+// Despite the name, this encoding applies only to individual
+// components of the query portion of the URL.
+func URLUnescape(s string) (string, os.Error) {
+ return urlUnescape(s, encodeQueryComponent)
+}
-// urlUnescape is like URLUnescape but can be told not to
-// convert + into space. URLUnescape implements what is
-// called "URL encoding" but that only applies to query strings.
-// Elsewhere in the URL, + does not mean space.
-func urlUnescape(s string, doPlus bool) (string, os.Error) {
+// urlUnescape is like URLUnescape but mode specifies
+// which section of the URL is being unescaped.
+func urlUnescape(s string, mode encoding) (string, os.Error) {
// Count %, check that they're well-formed.
n := 0
hasPlus := false
@@ -151,7 +200,7 @@ func urlUnescape(s string, doPlus bool) (string, os.Error) {
}
i += 3
case '+':
- hasPlus = doPlus
+ hasPlus = mode == encodeQueryComponent
i++
default:
i++
@@ -171,7 +220,7 @@ func urlUnescape(s string, doPlus bool) (string, os.Error) {
j++
i += 3
case '+':
- if doPlus {
+ if mode == encodeQueryComponent {
t[j] = ' '
} else {
t[j] = '+'
@@ -187,15 +236,19 @@ func urlUnescape(s string, doPlus bool) (string, os.Error) {
return string(t), nil
}
-// URLEscape converts a string into URL-encoded form.
-func URLEscape(s string) string { return urlEscape(s, true) }
+// URLEscape converts a string into ``URL encoded'' form.
+// Despite the name, this encoding applies only to individual
+// components of the query portion of the URL.
+func URLEscape(s string) string {
+ return urlEscape(s, encodeQueryComponent)
+}
-func urlEscape(s string, doPlus bool) string {
+func urlEscape(s string, mode encoding) string {
spaceCount, hexCount := 0, 0
for i := 0; i < len(s); i++ {
c := s[i]
- if shouldEscape(c) {
- if c == ' ' && doPlus {
+ if shouldEscape(c, mode) {
+ if c == ' ' && mode == encodeQueryComponent {
spaceCount++
} else {
hexCount++
@@ -211,10 +264,10 @@ func urlEscape(s string, doPlus bool) string {
j := 0
for i := 0; i < len(s); i++ {
switch c := s[i]; {
- case c == ' ' && doPlus:
+ case c == ' ' && mode == encodeQueryComponent:
t[j] = '+'
j++
- case shouldEscape(c):
+ case shouldEscape(c, mode):
t[j] = '%'
t[j+1] = "0123456789abcdef"[c>>4]
t[j+2] = "0123456789abcdef"[c&15]
@@ -227,25 +280,64 @@ func urlEscape(s string, doPlus bool) string {
return string(t)
}
+// UnescapeUserinfo parses the RawUserinfo field of a URL
+// in the form user or user:password, unescapes it, and returns
+// the two halves.
+//
+// This functionality should only be used with legacy web sites.
+// RFC 2396 warns that interpreting Userinfo this way
+// ``is NOT RECOMMENDED, because the passing of authentication
+// information in clear text (such as URI) has proven to be a
+// security risk in almost every case where it has been used.''
+func UnescapeUserinfo(rawUserinfo string) (user, password string, err os.Error) {
+ u, p := split(rawUserinfo, ':', true)
+ if user, err = urlUnescape(u, encodeUserPassword); err != nil {
+ return "", "", err
+ }
+ if password, err = urlUnescape(p, encodeUserPassword); err != nil {
+ return "", "", err
+ }
+ return
+}
+
+// EscapeUserinfo combines user and password in the form
+// user:password (or just user if password is empty) and then
+// escapes it for use as the URL.RawUserinfo field.
+//
+// This functionality should only be used with legacy web sites.
+// RFC 2396 warns that interpreting Userinfo this way
+// ``is NOT RECOMMENDED, because the passing of authentication
+// information in clear text (such as URI) has proven to be a
+// security risk in almost every case where it has been used.''
+func EscapeUserinfo(user, password string) string {
+ raw := urlEscape(user, encodeUserPassword)
+ if password != "" {
+ raw += ":" + urlEscape(password, encodeUserPassword)
+ }
+ return raw
+}
+
// A URL represents a parsed URL (technically, a URI reference).
// The general form represented is:
// scheme://[userinfo@]host/path[?query][#fragment]
-// The Raw, RawPath, and RawQuery fields are in "wire format" (special
-// characters must be hex-escaped if not meant to have special meaning).
+// The Raw, RawAuthority, RawPath, and RawQuery fields are in "wire format"
+// (special characters must be hex-escaped if not meant to have special meaning).
// All other fields are logical values; '+' or '%' represent themselves.
//
-// Note, the reason for using wire format for the query is that it needs
-// to be split into key/value pairs before decoding.
+// The various Raw values are supplied in wire format because
+// clients typically have to split them into pieces before further
+// decoding.
type URL struct {
- Raw string // the original string
- Scheme string // scheme
- Authority string // [userinfo@]host
- Userinfo string // userinfo
- Host string // host
- RawPath string // /path[?query][#fragment]
- Path string // /path
- RawQuery string // query
- Fragment string // fragment
+ Raw string // the original string
+ Scheme string // scheme
+ RawAuthority string // [userinfo@]host
+ RawUserinfo string // userinfo
+ Host string // host
+ RawPath string // /path[?query][#fragment]
+ Path string // /path
+ OpaquePath bool // path is opaque (unrooted when scheme is present)
+ RawQuery string // query
+ Fragment string // fragment
}
// Maybe rawurl is of the form scheme:path.
@@ -301,56 +393,63 @@ func ParseURL(rawurl string) (url *URL, err os.Error) {
url = new(URL)
url.Raw = rawurl
- // split off possible leading "http:", "mailto:", etc.
+ // Split off possible leading "http:", "mailto:", etc.
+ // Cannot contain escaped characters.
var path string
if url.Scheme, path, err = getscheme(rawurl); err != nil {
goto Error
}
- // RFC 2396: a relative URI (no scheme) has a ?query,
- // but absolute URIs only have query if path begins with /
- var query string
- if url.Scheme == "" || len(path) > 0 && path[0] == '/' {
- path, query = split(path, '?', false)
+ if url.Scheme != "" && (len(path) == 0 || path[0] != '/') {
+ // RFC 2396:
+ // Absolute URI (has scheme) with non-rooted path
+ // is uninterpreted. It doesn't even have a ?query.
+ // This is the case that handles mailto:name@example.com.
+ url.RawPath = path
+
+ if url.Path, err = urlUnescape(path, encodeOpaque); err != nil {
+ goto Error
+ }
+ url.OpaquePath = true
+ } else {
+ // Split off query before parsing path further.
+ url.RawPath = path
+ path, query := split(path, '?', false)
if len(query) > 1 {
url.RawQuery = query[1:]
}
- }
- // Maybe path is //authority/path
- if len(path) > 2 && path[0:2] == "//" {
- url.Authority, path = split(path[2:], '/', false)
- }
- url.RawPath = path + query
+ // Maybe path is //authority/path
+ if url.Scheme != "" && len(path) > 2 && path[0:2] == "//" {
+ url.RawAuthority, path = split(path[2:], '/', false)
+ url.RawPath = url.RawPath[2+len(url.RawAuthority):]
+ }
- // If there's no @, split's default is wrong. Check explicitly.
- if strings.Index(url.Authority, "@") < 0 {
- url.Host = url.Authority
- } else {
- url.Userinfo, url.Host = split(url.Authority, '@', true)
- }
+ // Split authority into userinfo@host.
+ // If there's no @, split's default is wrong. Check explicitly.
+ var rawHost string
+ if strings.Index(url.RawAuthority, "@") < 0 {
+ rawHost = url.RawAuthority
+ } else {
+ url.RawUserinfo, rawHost = split(url.RawAuthority, '@', true)
+ }
- if url.Path, err = urlUnescape(path, false); err != nil {
- goto Error
- }
+ // We leave RawAuthority only in raw form because clients
+ // of common protocols should be using Userinfo and Host
+ // instead. Clients that wish to use RawAuthority will have to
+ // interpret it themselves: RFC 2396 does not define the meaning.
- // Remove escapes from the Authority and Userinfo fields, and verify
- // that Scheme and Host contain no escapes (that would be illegal).
- if url.Authority, err = urlUnescape(url.Authority, false); err != nil {
- goto Error
- }
- if url.Userinfo, err = urlUnescape(url.Userinfo, false); err != nil {
- goto Error
- }
- if strings.Index(url.Scheme, "%") >= 0 {
- err = os.ErrorString("hexadecimal escape in scheme")
- goto Error
- }
- if strings.Index(url.Host, "%") >= 0 {
- err = os.ErrorString("hexadecimal escape in host")
- goto Error
- }
+ if strings.Contains(rawHost, "%") {
+ // Host cannot contain escaped characters.
+ err = os.ErrorString("hexadecimal escape in host")
+ goto Error
+ }
+ url.Host = rawHost
+ if url.Path, err = urlUnescape(path, encodePath); err != nil {
+ goto Error
+ }
+ }
return url, nil
Error:
@@ -369,7 +468,7 @@ func ParseURLReference(rawurlref string) (url *URL, err os.Error) {
url.RawPath += frag
if len(frag) > 1 {
frag = frag[1:]
- if url.Fragment, err = urlUnescape(frag, false); err != nil {
+ if url.Fragment, err = urlUnescape(frag, encodeFragment); err != nil {
return nil, &URLError{"parse", rawurl, err}
}
}
@@ -379,26 +478,52 @@ func ParseURLReference(rawurlref string) (url *URL, err os.Error) {
// String reassembles url into a valid URL string.
//
// There are redundant fields stored in the URL structure:
-// the String method consults Scheme, Path, Host, Userinfo,
+// the String method consults Scheme, Path, Host, RawUserinfo,
// RawQuery, and Fragment, but not Raw, RawPath or Authority.
func (url *URL) String() string {
result := ""
if url.Scheme != "" {
result += url.Scheme + ":"
}
- if url.Host != "" || url.Userinfo != "" {
+ if url.Host != "" || url.RawUserinfo != "" {
result += "//"
- if url.Userinfo != "" {
- result += urlEscape(url.Userinfo, false) + "@"
+ if url.RawUserinfo != "" {
+ // hide the password, if any
+ info := url.RawUserinfo
+ if i := strings.Index(info, ":"); i >= 0 {
+ info = info[0:i] + ":******"
+ }
+ result += info + "@"
}
result += url.Host
}
- result += urlEscape(url.Path, false)
+ if url.OpaquePath {
+ path := url.Path
+ if strings.HasPrefix(path, "/") {
+ result += "%2f"
+ path = path[1:]
+ }
+ result += urlEscape(path, encodeOpaque)
+ } else {
+ result += urlEscape(url.Path, encodePath)
+ }
if url.RawQuery != "" {
result += "?" + url.RawQuery
}
if url.Fragment != "" {
- result += "#" + urlEscape(url.Fragment, false)
+ result += "#" + urlEscape(url.Fragment, encodeFragment)
}
return result
}
+
+// EncodeQuery encodes the query represented as a multimap.
+func EncodeQuery(m map[string][]string) string {
+ parts := make([]string, 0, len(m)) // will be large enough for most uses
+ for k, vs := range m {
+ prefix := URLEscape(k) + "="
+ for _, v := range vs {
+ parts = append(parts, prefix+URLEscape(v))
+ }
+ }
+ return strings.Join(parts, "&")
+}
diff --git a/src/pkg/http/url_test.go b/src/pkg/http/url_test.go
index 3d665100a..447d5390e 100644
--- a/src/pkg/http/url_test.go
+++ b/src/pkg/http/url_test.go
@@ -24,125 +24,138 @@ type URLTest struct {
var urltests = []URLTest{
// no path
- URLTest{
+ {
"http://www.google.com",
&URL{
- Raw: "http://www.google.com",
- Scheme: "http",
- Authority: "www.google.com",
- Host: "www.google.com",
+ Raw: "http://www.google.com",
+ Scheme: "http",
+ RawAuthority: "www.google.com",
+ Host: "www.google.com",
},
"",
},
// path
- URLTest{
+ {
"http://www.google.com/",
&URL{
- Raw: "http://www.google.com/",
- Scheme: "http",
- Authority: "www.google.com",
- Host: "www.google.com",
- RawPath: "/",
- Path: "/",
+ Raw: "http://www.google.com/",
+ Scheme: "http",
+ RawAuthority: "www.google.com",
+ Host: "www.google.com",
+ RawPath: "/",
+ Path: "/",
},
"",
},
// path with hex escaping
- URLTest{
+ {
"http://www.google.com/file%20one%26two",
&URL{
- Raw: "http://www.google.com/file%20one%26two",
- Scheme: "http",
- Authority: "www.google.com",
- Host: "www.google.com",
- RawPath: "/file%20one%26two",
- Path: "/file one&two",
+ Raw: "http://www.google.com/file%20one%26two",
+ Scheme: "http",
+ RawAuthority: "www.google.com",
+ Host: "www.google.com",
+ RawPath: "/file%20one%26two",
+ Path: "/file one&two",
},
- "http://www.google.com/file%20one%26two",
+ "http://www.google.com/file%20one&two",
},
// user
- URLTest{
+ {
"ftp://webmaster@www.google.com/",
&URL{
- Raw: "ftp://webmaster@www.google.com/",
- Scheme: "ftp",
- Authority: "webmaster@www.google.com",
- Userinfo: "webmaster",
- Host: "www.google.com",
- RawPath: "/",
- Path: "/",
+ Raw: "ftp://webmaster@www.google.com/",
+ Scheme: "ftp",
+ RawAuthority: "webmaster@www.google.com",
+ RawUserinfo: "webmaster",
+ Host: "www.google.com",
+ RawPath: "/",
+ Path: "/",
},
"",
},
// escape sequence in username
- URLTest{
+ {
"ftp://john%20doe@www.google.com/",
&URL{
- Raw: "ftp://john%20doe@www.google.com/",
- Scheme: "ftp",
- Authority: "john doe@www.google.com",
- Userinfo: "john doe",
- Host: "www.google.com",
- RawPath: "/",
- Path: "/",
+ Raw: "ftp://john%20doe@www.google.com/",
+ Scheme: "ftp",
+ RawAuthority: "john%20doe@www.google.com",
+ RawUserinfo: "john%20doe",
+ Host: "www.google.com",
+ RawPath: "/",
+ Path: "/",
},
"ftp://john%20doe@www.google.com/",
},
// query
- URLTest{
+ {
"http://www.google.com/?q=go+language",
&URL{
- Raw: "http://www.google.com/?q=go+language",
- Scheme: "http",
- Authority: "www.google.com",
- Host: "www.google.com",
- RawPath: "/?q=go+language",
- Path: "/",
- RawQuery: "q=go+language",
+ Raw: "http://www.google.com/?q=go+language",
+ Scheme: "http",
+ RawAuthority: "www.google.com",
+ Host: "www.google.com",
+ RawPath: "/?q=go+language",
+ Path: "/",
+ RawQuery: "q=go+language",
},
"",
},
// query with hex escaping: NOT parsed
- URLTest{
+ {
"http://www.google.com/?q=go%20language",
&URL{
- Raw: "http://www.google.com/?q=go%20language",
- Scheme: "http",
- Authority: "www.google.com",
- Host: "www.google.com",
- RawPath: "/?q=go%20language",
- Path: "/",
- RawQuery: "q=go%20language",
+ Raw: "http://www.google.com/?q=go%20language",
+ Scheme: "http",
+ RawAuthority: "www.google.com",
+ Host: "www.google.com",
+ RawPath: "/?q=go%20language",
+ Path: "/",
+ RawQuery: "q=go%20language",
},
"",
},
// %20 outside query
- URLTest{
+ {
"http://www.google.com/a%20b?q=c+d",
&URL{
- Raw: "http://www.google.com/a%20b?q=c+d",
- Scheme: "http",
- Authority: "www.google.com",
- Host: "www.google.com",
- RawPath: "/a%20b?q=c+d",
- Path: "/a b",
- RawQuery: "q=c+d",
+ Raw: "http://www.google.com/a%20b?q=c+d",
+ Scheme: "http",
+ RawAuthority: "www.google.com",
+ Host: "www.google.com",
+ RawPath: "/a%20b?q=c+d",
+ Path: "/a b",
+ RawQuery: "q=c+d",
},
"",
},
- // path without /, so no query parsing
- URLTest{
+ // path without leading /, so no query parsing
+ {
"http:www.google.com/?q=go+language",
&URL{
- Raw: "http:www.google.com/?q=go+language",
- Scheme: "http",
- RawPath: "www.google.com/?q=go+language",
- Path: "www.google.com/?q=go+language",
+ Raw: "http:www.google.com/?q=go+language",
+ Scheme: "http",
+ RawPath: "www.google.com/?q=go+language",
+ Path: "www.google.com/?q=go+language",
+ OpaquePath: true,
+ },
+ "http:www.google.com/?q=go+language",
+ },
+ // path without leading /, so no query parsing
+ {
+ "http:%2f%2fwww.google.com/?q=go+language",
+ &URL{
+ Raw: "http:%2f%2fwww.google.com/?q=go+language",
+ Scheme: "http",
+ RawPath: "%2f%2fwww.google.com/?q=go+language",
+ Path: "//www.google.com/?q=go+language",
+ OpaquePath: true,
},
- "http:www.google.com/%3fq%3dgo%2blanguage",
+ "http:%2f/www.google.com/?q=go+language",
},
// non-authority
- URLTest{
+ {
"mailto:/webmaster@golang.org",
&URL{
Raw: "mailto:/webmaster@golang.org",
@@ -153,18 +166,19 @@ var urltests = []URLTest{
"",
},
// non-authority
- URLTest{
+ {
"mailto:webmaster@golang.org",
&URL{
- Raw: "mailto:webmaster@golang.org",
- Scheme: "mailto",
- RawPath: "webmaster@golang.org",
- Path: "webmaster@golang.org",
+ Raw: "mailto:webmaster@golang.org",
+ Scheme: "mailto",
+ RawPath: "webmaster@golang.org",
+ Path: "webmaster@golang.org",
+ OpaquePath: true,
},
"",
},
// unescaped :// in query should not create a scheme
- URLTest{
+ {
"/foo?query=http://bad",
&URL{
Raw: "/foo?query=http://bad",
@@ -174,59 +188,92 @@ var urltests = []URLTest{
},
"",
},
+ // leading // without scheme shouldn't create an authority
+ {
+ "//foo",
+ &URL{
+ Raw: "//foo",
+ Scheme: "",
+ RawPath: "//foo",
+ Path: "//foo",
+ },
+ "",
+ },
+ {
+ "http://user:password@google.com",
+ &URL{
+ Raw: "http://user:password@google.com",
+ Scheme: "http",
+ RawAuthority: "user:password@google.com",
+ RawUserinfo: "user:password",
+ Host: "google.com",
+ },
+ "http://user:******@google.com",
+ },
+ {
+ "http://user:longerpass@google.com",
+ &URL{
+ Raw: "http://user:longerpass@google.com",
+ Scheme: "http",
+ RawAuthority: "user:longerpass@google.com",
+ RawUserinfo: "user:longerpass",
+ Host: "google.com",
+ },
+ "http://user:******@google.com",
+ },
}
var urlnofragtests = []URLTest{
- URLTest{
+ {
"http://www.google.com/?q=go+language#foo",
&URL{
- Raw: "http://www.google.com/?q=go+language#foo",
- Scheme: "http",
- Authority: "www.google.com",
- Host: "www.google.com",
- RawPath: "/?q=go+language#foo",
- Path: "/",
- RawQuery: "q=go+language#foo",
+ Raw: "http://www.google.com/?q=go+language#foo",
+ Scheme: "http",
+ RawAuthority: "www.google.com",
+ Host: "www.google.com",
+ RawPath: "/?q=go+language#foo",
+ Path: "/",
+ RawQuery: "q=go+language#foo",
},
"",
},
}
var urlfragtests = []URLTest{
- URLTest{
+ {
"http://www.google.com/?q=go+language#foo",
&URL{
- Raw: "http://www.google.com/?q=go+language#foo",
- Scheme: "http",
- Authority: "www.google.com",
- Host: "www.google.com",
- RawPath: "/?q=go+language#foo",
- Path: "/",
- RawQuery: "q=go+language",
- Fragment: "foo",
+ Raw: "http://www.google.com/?q=go+language#foo",
+ Scheme: "http",
+ RawAuthority: "www.google.com",
+ Host: "www.google.com",
+ RawPath: "/?q=go+language#foo",
+ Path: "/",
+ RawQuery: "q=go+language",
+ Fragment: "foo",
},
"",
},
- URLTest{
+ {
"http://www.google.com/?q=go+language#foo%26bar",
&URL{
- Raw: "http://www.google.com/?q=go+language#foo%26bar",
- Scheme: "http",
- Authority: "www.google.com",
- Host: "www.google.com",
- RawPath: "/?q=go+language#foo%26bar",
- Path: "/",
- RawQuery: "q=go+language",
- Fragment: "foo&bar",
+ Raw: "http://www.google.com/?q=go+language#foo%26bar",
+ Scheme: "http",
+ RawAuthority: "www.google.com",
+ Host: "www.google.com",
+ RawPath: "/?q=go+language#foo%26bar",
+ Path: "/",
+ RawQuery: "q=go+language",
+ Fragment: "foo&bar",
},
- "",
+ "http://www.google.com/?q=go+language#foo&bar",
},
}
// more useful string for debugging than fmt's struct printer
func ufmt(u *URL) string {
return fmt.Sprintf("%q, %q, %q, %q, %q, %q, %q, %q, %q",
- u.Raw, u.Scheme, u.RawPath, u.Authority, u.Userinfo,
+ u.Raw, u.Scheme, u.RawPath, u.RawAuthority, u.RawUserinfo,
u.Host, u.Path, u.RawQuery, u.Fragment)
}
@@ -274,11 +321,9 @@ func DoTestString(t *testing.T, parse func(string) (*URL, os.Error), name string
func TestURLString(t *testing.T) {
DoTestString(t, ParseURL, "ParseURL", urltests)
- DoTestString(t, ParseURL, "ParseURL", urlfragtests)
DoTestString(t, ParseURL, "ParseURL", urlnofragtests)
DoTestString(t, ParseURLReference, "ParseURLReference", urltests)
DoTestString(t, ParseURLReference, "ParseURLReference", urlfragtests)
- DoTestString(t, ParseURLReference, "ParseURLReference", urlnofragtests)
}
type URLEscapeTest struct {
@@ -288,57 +333,57 @@ type URLEscapeTest struct {
}
var unescapeTests = []URLEscapeTest{
- URLEscapeTest{
+ {
"",
"",
nil,
},
- URLEscapeTest{
+ {
"abc",
"abc",
nil,
},
- URLEscapeTest{
+ {
"1%41",
"1A",
nil,
},
- URLEscapeTest{
+ {
"1%41%42%43",
"1ABC",
nil,
},
- URLEscapeTest{
+ {
"%4a",
"J",
nil,
},
- URLEscapeTest{
+ {
"%6F",
"o",
nil,
},
- URLEscapeTest{
+ {
"%", // not enough characters after %
"",
URLEscapeError("%"),
},
- URLEscapeTest{
+ {
"%a", // not enough characters after %
"",
URLEscapeError("%a"),
},
- URLEscapeTest{
+ {
"%1", // not enough characters after %
"",
URLEscapeError("%1"),
},
- URLEscapeTest{
+ {
"123%45%6", // not enough characters after %
"",
URLEscapeError("%6"),
},
- URLEscapeTest{
+ {
"%zzzzz", // invalid hex digits
"",
URLEscapeError("%zz"),
@@ -355,27 +400,27 @@ func TestURLUnescape(t *testing.T) {
}
var escapeTests = []URLEscapeTest{
- URLEscapeTest{
+ {
"",
"",
nil,
},
- URLEscapeTest{
+ {
"abc",
"abc",
nil,
},
- URLEscapeTest{
+ {
"one two",
"one+two",
nil,
},
- URLEscapeTest{
+ {
"10%",
"10%25",
nil,
},
- URLEscapeTest{
+ {
" ?&=#+%!<>#\"{}|\\^[]`☺\t",
"+%3f%26%3d%23%2b%25!%3c%3e%23%22%7b%7d%7c%5c%5e%5b%5d%60%e2%98%ba%09",
nil,
@@ -403,27 +448,27 @@ type CanonicalPathTest struct {
}
var canonicalTests = []CanonicalPathTest{
- CanonicalPathTest{"", ""},
- CanonicalPathTest{"/", "/"},
- CanonicalPathTest{".", ""},
- CanonicalPathTest{"./", ""},
- CanonicalPathTest{"/a/", "/a/"},
- CanonicalPathTest{"a/", "a/"},
- CanonicalPathTest{"a/./", "a/"},
- CanonicalPathTest{"./a", "a"},
- CanonicalPathTest{"/a/../b", "/b"},
- CanonicalPathTest{"a/../b", "b"},
- CanonicalPathTest{"a/../../b", "../b"},
- CanonicalPathTest{"a/.", "a/"},
- CanonicalPathTest{"../.././a", "../../a"},
- CanonicalPathTest{"/../.././a", "/../../a"},
- CanonicalPathTest{"a/b/g/../..", "a/"},
- CanonicalPathTest{"a/b/..", "a/"},
- CanonicalPathTest{"a/b/.", "a/b/"},
- CanonicalPathTest{"a/b/../../../..", "../.."},
- CanonicalPathTest{"a./", "a./"},
- CanonicalPathTest{"/../a/b/../../../", "/../../"},
- CanonicalPathTest{"../a/b/../../../", "../../"},
+ {"", ""},
+ {"/", "/"},
+ {".", ""},
+ {"./", ""},
+ {"/a/", "/a/"},
+ {"a/", "a/"},
+ {"a/./", "a/"},
+ {"./a", "a"},
+ {"/a/../b", "/b"},
+ {"a/../b", "b"},
+ {"a/../../b", "../b"},
+ {"a/.", "a/"},
+ {"../.././a", "../../a"},
+ {"/../.././a", "/../../a"},
+ {"a/b/g/../..", "a/"},
+ {"a/b/..", "a/"},
+ {"a/b/.", "a/b/"},
+ {"a/b/../../../..", "../.."},
+ {"a./", "a./"},
+ {"/../a/b/../../../", "/../../"},
+ {"../a/b/../../../", "../../"},
}
func TestCanonicalPath(t *testing.T) {
@@ -434,3 +479,53 @@ func TestCanonicalPath(t *testing.T) {
}
}
}
+
+type UserinfoTest struct {
+ User string
+ Password string
+ Raw string
+}
+
+var userinfoTests = []UserinfoTest{
+ {"user", "password", "user:password"},
+ {"foo:bar", "~!@#$%^&*()_+{}|[]\\-=`:;'\"<>?,./",
+ "foo%3abar:~!%40%23$%25%5e&*()_+%7b%7d%7c%5b%5d%5c-=%60%3a;'%22%3c%3e?,.%2f"},
+}
+
+func TestEscapeUserinfo(t *testing.T) {
+ for _, tt := range userinfoTests {
+ if raw := EscapeUserinfo(tt.User, tt.Password); raw != tt.Raw {
+ t.Errorf("EscapeUserinfo(%q, %q) = %q, want %q", tt.User, tt.Password, raw, tt.Raw)
+ }
+ }
+}
+
+func TestUnescapeUserinfo(t *testing.T) {
+ for _, tt := range userinfoTests {
+ if user, pass, err := UnescapeUserinfo(tt.Raw); user != tt.User || pass != tt.Password || err != nil {
+ t.Errorf("UnescapeUserinfo(%q) = %q, %q, %v, want %q, %q, nil", tt.Raw, user, pass, err, tt.User, tt.Password)
+ }
+ }
+}
+
+type qMap map[string][]string
+
+type EncodeQueryTest struct {
+ m qMap
+ expected string
+ expected1 string
+}
+
+var encodeQueryTests = []EncodeQueryTest{
+ {nil, "", ""},
+ {qMap{"q": {"puppies"}, "oe": {"utf8"}}, "q=puppies&oe=utf8", "oe=utf8&q=puppies"},
+ {qMap{"q": {"dogs", "&", "7"}}, "q=dogs&q=%26&q=7", "q=dogs&q=%26&q=7"},
+}
+
+func TestEncodeQuery(t *testing.T) {
+ for _, tt := range encodeQueryTests {
+ if q := EncodeQuery(tt.m); q != tt.expected && q != tt.expected1 {
+ t.Errorf(`EncodeQuery(%+v) = %q, want %q`, tt.m, q, tt.expected)
+ }
+ }
+}