Diffstat (limited to 'src/pkg/reflect')
-rw-r--r--  src/pkg/reflect/all_test.go      | 223
-rw-r--r--  src/pkg/reflect/asm_amd64p32.s   |  27
-rw-r--r--  src/pkg/reflect/deepequal.go     |   3
-rw-r--r--  src/pkg/reflect/export_test.go   |   1
-rw-r--r--  src/pkg/reflect/makefunc.go      |   8
-rw-r--r--  src/pkg/reflect/type.go          | 131
-rw-r--r--  src/pkg/reflect/value.go         | 968
7 files changed, 947 insertions, 414 deletions
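
Before the raw hunks, a quick illustration of one user-visible behavior this patch pins down: reflection on a nil map. The sketch below is not part of the patch; it simply mirrors what the new TestNilMap in all_test.go asserts (no keys, an Invalid Value for a missing key, and a delete that succeeds), with local names such as m and mv chosen only for illustration.

package main

import (
	"fmt"
	"reflect"
)

func main() {
	var m map[string]int // nil map
	mv := reflect.ValueOf(m)

	// MapKeys on a nil map returns an empty slice rather than panicking.
	fmt.Println(len(mv.MapKeys())) // 0

	// MapIndex for a key that is not present (every key, since the map
	// is nil) returns the zero Value, whose Kind is Invalid.
	x := mv.MapIndex(reflect.ValueOf("hello"))
	fmt.Println(x.Kind()) // invalid

	// Passing the zero Value as val asks SetMapIndex to delete the key.
	// Deleting from a nil map succeeds, mirroring the built-in delete.
	mv.SetMapIndex(reflect.ValueOf("hi"), reflect.Value{})
}
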
diff --git a/src/pkg/reflect/all_test.go b/src/pkg/reflect/all_test.go index e9a20963f..e9949012c 100644 --- a/src/pkg/reflect/all_test.go +++ b/src/pkg/reflect/all_test.go @@ -15,6 +15,7 @@ import ( . "reflect" "runtime" "sort" + "strings" "sync" "testing" "time" @@ -678,6 +679,7 @@ var deepEqualTests = []DeepEqualTest{ {1, nil, false}, {fn1, fn3, false}, {fn3, fn3, false}, + {[][]int{[]int{1}}, [][]int{[]int{2}}, false}, // Nil vs empty: not the same. {[]int{}, []int(nil), false}, @@ -971,6 +973,31 @@ func TestMap(t *testing.T) { } } +func TestNilMap(t *testing.T) { + var m map[string]int + mv := ValueOf(m) + keys := mv.MapKeys() + if len(keys) != 0 { + t.Errorf(">0 keys for nil map: %v", keys) + } + + // Check that value for missing key is zero. + x := mv.MapIndex(ValueOf("hello")) + if x.Kind() != Invalid { + t.Errorf("m.MapIndex(\"hello\") for nil map = %v, want Invalid Value", x) + } + + // Check big value too. + var mbig map[string][10 << 20]byte + x = ValueOf(mbig).MapIndex(ValueOf("hello")) + if x.Kind() != Invalid { + t.Errorf("mbig.MapIndex(\"hello\") for nil map = %v, want Invalid Value", x) + } + + // Test that deletes from a nil map succeed. + mv.SetMapIndex(ValueOf("hi"), Value{}) +} + func TestChan(t *testing.T) { for loop := 0; loop < 2; loop++ { var c chan int @@ -1434,6 +1461,46 @@ func TestFunc(t *testing.T) { } } +type emptyStruct struct{} + +type nonEmptyStruct struct { + member int +} + +func returnEmpty() emptyStruct { + return emptyStruct{} +} + +func takesEmpty(e emptyStruct) { +} + +func returnNonEmpty(i int) nonEmptyStruct { + return nonEmptyStruct{member: i} +} + +func takesNonEmpty(n nonEmptyStruct) int { + return n.member +} + +func TestCallWithStruct(t *testing.T) { + r := ValueOf(returnEmpty).Call(nil) + if len(r) != 1 || r[0].Type() != TypeOf(emptyStruct{}) { + t.Errorf("returning empty struct returned %#v instead", r) + } + r = ValueOf(takesEmpty).Call([]Value{ValueOf(emptyStruct{})}) + if len(r) != 0 { + t.Errorf("takesEmpty returned values: %#v", r) + } + r = ValueOf(returnNonEmpty).Call([]Value{ValueOf(42)}) + if len(r) != 1 || r[0].Type() != TypeOf(nonEmptyStruct{}) || r[0].Field(0).Int() != 42 { + t.Errorf("returnNonEmpty returned %#v", r) + } + r = ValueOf(takesNonEmpty).Call([]Value{ValueOf(nonEmptyStruct{member: 42})}) + if len(r) != 1 || r[0].Type() != TypeOf(1) || r[0].Int() != 42 { + t.Errorf("takesNonEmpty returned %#v", r) + } +} + func TestMakeFunc(t *testing.T) { f := dummy fv := MakeFunc(TypeOf(f), func(in []Value) []Value { return in }) @@ -1470,6 +1537,23 @@ func TestMakeFuncInterface(t *testing.T) { } } +func TestMakeFuncVariadic(t *testing.T) { + // Test that variadic arguments are packed into a slice and passed as last arg + fn := func(_ int, is ...int) []int { return nil } + fv := MakeFunc(TypeOf(fn), func(in []Value) []Value { return in[1:2] }) + ValueOf(&fn).Elem().Set(fv) + + r := fv.Call([]Value{ValueOf(1), ValueOf(2), ValueOf(3)})[0].Interface().([]int) + if r[0] != 2 || r[1] != 3 { + t.Errorf("Call returned [%v, %v]; want 2, 3", r[0], r[1]) + } + + r = fv.CallSlice([]Value{ValueOf(1), ValueOf([]int{2, 3})})[0].Interface().([]int) + if r[0] != 2 || r[1] != 3 { + t.Errorf("Call returned [%v, %v]; want 2, 3", r[0], r[1]) + } +} + type Point struct { x, y int } @@ -3616,3 +3700,142 @@ func (x *exhaustive) Choose(max int) int { func (x *exhaustive) Maybe() bool { return x.Choose(2) == 1 } + +func GCFunc(args []Value) []Value { + runtime.GC() + return []Value{} +} + +func TestReflectFuncTraceback(t *testing.T) { + f := 
MakeFunc(TypeOf(func() {}), GCFunc) + f.Call([]Value{}) +} + +func (p Point) GCMethod(k int) int { + runtime.GC() + return k + p.x +} + +func TestReflectMethodTraceback(t *testing.T) { + p := Point{3, 4} + m := ValueOf(p).MethodByName("GCMethod") + i := ValueOf(m.Interface()).Call([]Value{ValueOf(5)})[0].Int() + if i != 8 { + t.Errorf("Call returned %d; want 8", i) + } +} + +func TestBigZero(t *testing.T) { + const size = 1 << 10 + var v [size]byte + z := Zero(ValueOf(v).Type()).Interface().([size]byte) + for i := 0; i < size; i++ { + if z[i] != 0 { + t.Fatalf("Zero object not all zero, index %d", i) + } + } +} + +func TestFieldByIndexNil(t *testing.T) { + type P struct { + F int + } + type T struct { + *P + } + v := ValueOf(T{}) + + v.FieldByName("P") // should be fine + + defer func() { + if err := recover(); err == nil { + t.Fatalf("no error") + } else if !strings.Contains(fmt.Sprint(err), "nil pointer to embedded struct") { + t.Fatalf(`err=%q, wanted error containing "nil pointer to embedded struct"`, err) + } + }() + v.FieldByName("F") // should panic + + t.Fatalf("did not panic") +} + +// Given +// type Outer struct { +// *Inner +// ... +// } +// the compiler generates the implementation of (*Outer).M dispatching to the embedded Inner. +// The implementation is logically: +// func (p *Outer) M() { +// (p.Inner).M() +// } +// but since the only change here is the replacement of one pointer receiver with another, +// the actual generated code overwrites the original receiver with the p.Inner pointer and +// then jumps to the M method expecting the *Inner receiver. +// +// During reflect.Value.Call, we create an argument frame and the associated data structures +// to describe it to the garbage collector, populate the frame, call reflect.call to +// run a function call using that frame, and then copy the results back out of the frame. +// The reflect.call function does a memmove of the frame structure onto the +// stack (to set up the inputs), runs the call, and the memmoves the stack back to +// the frame structure (to preserve the outputs). +// +// Originally reflect.call did not distinguish inputs from outputs: both memmoves +// were for the full stack frame. However, in the case where the called function was +// one of these wrappers, the rewritten receiver is almost certainly a different type +// than the original receiver. This is not a problem on the stack, where we use the +// program counter to determine the type information and understand that +// during (*Outer).M the receiver is an *Outer while during (*Inner).M the receiver in the same +// memory word is now an *Inner. But in the statically typed argument frame created +// by reflect, the receiver is always an *Outer. Copying the modified receiver pointer +// off the stack into the frame will store an *Inner there, and then if a garbage collection +// happens to scan that argument frame before it is discarded, it will scan the *Inner +// memory as if it were an *Outer. If the two have different memory layouts, the +// collection will intepret the memory incorrectly. +// +// One such possible incorrect interpretation is to treat two arbitrary memory words +// (Inner.P1 and Inner.P2 below) as an interface (Outer.R below). Because interpreting +// an interface requires dereferencing the itab word, the misinterpretation will try to +// deference Inner.P1, causing a crash during garbage collection. +// +// This came up in a real program in issue 7725. 
+ +type Outer struct { + *Inner + R io.Reader +} + +type Inner struct { + X *Outer + P1 uintptr + P2 uintptr +} + +func (pi *Inner) M() { + // Clear references to pi so that the only way the + // garbage collection will find the pointer is in the + // argument frame, typed as a *Outer. + pi.X.Inner = nil + + // Set up an interface value that will cause a crash. + // P1 = 1 is a non-zero, so the interface looks non-nil. + // P2 = pi ensures that the data word points into the + // allocated heap; if not the collection skips the interface + // value as irrelevant, without dereferencing P1. + pi.P1 = 1 + pi.P2 = uintptr(unsafe.Pointer(pi)) +} + +func TestCallMethodJump(t *testing.T) { + // In reflect.Value.Call, trigger a garbage collection after reflect.call + // returns but before the args frame has been discarded. + // This is a little clumsy but makes the failure repeatable. + *CallGC = true + + p := &Outer{Inner: new(Inner)} + p.Inner.X = p + ValueOf(p).Method(0).Call(nil) + + // Stop garbage collecting during reflect.call. + *CallGC = false +} diff --git a/src/pkg/reflect/asm_amd64p32.s b/src/pkg/reflect/asm_amd64p32.s new file mode 100644 index 000000000..75413c752 --- /dev/null +++ b/src/pkg/reflect/asm_amd64p32.s @@ -0,0 +1,27 @@ +// Copyright 2012 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +#include "../../cmd/ld/textflag.h" + +// makeFuncStub is the code half of the function returned by MakeFunc. +// See the comment on the declaration of makeFuncStub in makefunc.go +// for more details. +// No argsize here, gc generates argsize info at call site. +TEXT ·makeFuncStub(SB),(NOSPLIT|WRAPPER),$8 + MOVL DX, 0(SP) + LEAL argframe+0(FP), CX + MOVL CX, 4(SP) + CALL ·callReflect(SB) + RET + +// methodValueCall is the code half of the function returned by makeMethodValue. +// See the comment on the declaration of methodValueCall in makefunc.go +// for more details. +// No argsize here, gc generates argsize info at call site. +TEXT ·methodValueCall(SB),(NOSPLIT|WRAPPER),$8 + MOVL DX, 0(SP) + LEAL argframe+0(FP), CX + MOVL CX, 4(SP) + CALL ·callMethod(SB) + RET diff --git a/src/pkg/reflect/deepequal.go b/src/pkg/reflect/deepequal.go index e3bf3dcac..f63715c9a 100644 --- a/src/pkg/reflect/deepequal.go +++ b/src/pkg/reflect/deepequal.go @@ -62,9 +62,6 @@ func deepValueEqual(v1, v2 Value, visited map[visit]bool, depth int) bool { switch v1.Kind() { case Array: - if v1.Len() != v2.Len() { - return false - } for i := 0; i < v1.Len(); i++ { if !deepValueEqual(v1.Index(i), v2.Index(i), visited, depth+1) { return false diff --git a/src/pkg/reflect/export_test.go b/src/pkg/reflect/export_test.go index cd8cf2cf2..0778ad37f 100644 --- a/src/pkg/reflect/export_test.go +++ b/src/pkg/reflect/export_test.go @@ -16,3 +16,4 @@ func IsRO(v Value) bool { } var ArrayOf = arrayOf +var CallGC = &callGC diff --git a/src/pkg/reflect/makefunc.go b/src/pkg/reflect/makefunc.go index e1608ea6c..0e61fdea7 100644 --- a/src/pkg/reflect/makefunc.go +++ b/src/pkg/reflect/makefunc.go @@ -56,7 +56,7 @@ func MakeFunc(typ Type, fn func(args []Value) (results []Value)) Value { impl := &makeFuncImpl{code: code, typ: ftyp, fn: fn} - return Value{t, unsafe.Pointer(impl), flag(Func) << flagKindShift} + return Value{t, unsafe.Pointer(impl), 0, flag(Func) << flagKindShift} } // makeFuncStub is an assembly function that is the code half of @@ -81,13 +81,13 @@ type methodValue struct { // by code like Convert and Interface and Assign. 
func makeMethodValue(op string, v Value) Value { if v.flag&flagMethod == 0 { - panic("reflect: internal error: invalid use of makePartialFunc") + panic("reflect: internal error: invalid use of makeMethodValue") } // Ignoring the flagMethod bit, v describes the receiver, not the method type. fl := v.flag & (flagRO | flagAddr | flagIndir) fl |= flag(v.typ.Kind()) << flagKindShift - rcvr := Value{v.typ, v.val, fl} + rcvr := Value{v.typ, v.ptr, v.scalar, fl} // v.Type returns the actual type of the method value. funcType := v.Type().(*rtype) @@ -109,7 +109,7 @@ func makeMethodValue(op string, v Value) Value { // but we want Interface() and other operations to fail early. methodReceiver(op, fv.rcvr, fv.method) - return Value{funcType, unsafe.Pointer(fv), v.flag&flagRO | flag(Func)<<flagKindShift} + return Value{funcType, unsafe.Pointer(fv), 0, v.flag&flagRO | flag(Func)<<flagKindShift} } // methodValueCall is an assembly function that is the code half of diff --git a/src/pkg/reflect/type.go b/src/pkg/reflect/type.go index 7afb7defe..40d76f99d 100644 --- a/src/pkg/reflect/type.go +++ b/src/pkg/reflect/type.go @@ -16,6 +16,7 @@ package reflect import ( + "runtime" "strconv" "sync" "unsafe" @@ -252,6 +253,7 @@ type rtype struct { string *string // string form; unnecessary but undeniably useful *uncommonType // (relatively) uncommon fields ptrToThis *rtype // type for pointer to this type, if used in binary or has methods + zero unsafe.Pointer // pointer to zero value } // Method on non-interface type @@ -477,6 +479,8 @@ func (t *rtype) FieldAlign() int { return int(t.fieldAlign) } func (t *rtype) Kind() Kind { return Kind(t.kind & kindMask) } +func (t *rtype) pointers() bool { return t.kind&kindNoPointers == 0 } + func (t *rtype) common() *rtype { return t } func (t *uncommonType) Method(i int) (m Method) { @@ -495,7 +499,7 @@ func (t *uncommonType) Method(i int) (m Method) { mt := p.typ m.Type = mt fn := unsafe.Pointer(&p.tfn) - m.Func = Value{mt, fn, fl} + m.Func = Value{mt, fn, 0, fl} m.Index = i return } @@ -1089,6 +1093,7 @@ func (t *rtype) ptrTo() *rtype { p.uncommonType = nil p.ptrToThis = nil + p.zero = unsafe.Pointer(&make([]byte, p.size)[0]) p.elem = t if t.kind&kindNoPointers != 0 { @@ -1475,6 +1480,7 @@ func ChanOf(dir ChanDir, t Type) Type { ch.elem = typ ch.uncommonType = nil ch.ptrToThis = nil + ch.zero = unsafe.Pointer(&make([]byte, ch.size)[0]) ch.gc = unsafe.Pointer(&chanGC{ width: ch.size, @@ -1534,6 +1540,14 @@ func MapOf(key, elem Type) Type { mt.hmap = hMapOf(mt.bucket) mt.uncommonType = nil mt.ptrToThis = nil + mt.zero = unsafe.Pointer(&make([]byte, mt.size)[0]) + mt.gc = unsafe.Pointer(&ptrGC{ + width: unsafe.Sizeof(uintptr(0)), + op: _GC_PTR, + off: 0, + elemgc: mt.hmap.gc, + end: _GC_END, + }) // INCORRECT. Uncomment to check that TestMapOfGC and TestMapOfGCValues // fail when mt.gc is wrong. 
@@ -1566,6 +1580,10 @@ func bucketOf(ktyp, etyp *rtype) *rtype { gc = append(gc, _GC_PTR, offset, 0 /*self pointer set below*/) // overflow offset += ptrsize + if runtime.GOARCH == "amd64p32" { + offset += 4 + } + // keys if ktyp.kind&kindNoPointers == 0 { gc = append(gc, _GC_ARRAY_START, offset, _BUCKETSIZE, ktyp.size) @@ -1709,6 +1727,7 @@ func SliceOf(t Type) Type { slice.elem = typ slice.uncommonType = nil slice.ptrToThis = nil + slice.zero = unsafe.Pointer(&make([]byte, slice.size)[0]) if typ.size == 0 { slice.gc = unsafe.Pointer(&sliceEmptyGCProg) @@ -1778,6 +1797,7 @@ func arrayOf(count int, elem Type) Type { // TODO: array.gc array.uncommonType = nil array.ptrToThis = nil + array.zero = unsafe.Pointer(&make([]byte, array.size)[0]) array.len = uintptr(count) array.slice = slice.(*rtype) @@ -1795,3 +1815,112 @@ func toType(t *rtype) Type { } return t } + +type layoutKey struct { + t *rtype // function signature + rcvr *rtype // receiver type, or nil if none +} + +type layoutType struct { + t *rtype + argSize uintptr // size of arguments + retOffset uintptr // offset of return values. +} + +var layoutCache struct { + sync.RWMutex + m map[layoutKey]layoutType +} + +// funcLayout computes a struct type representing the layout of the +// function arguments and return values for the function type t. +// If rcvr != nil, rcvr specifies the type of the receiver. +// The returned type exists only for GC, so we only fill out GC relevant info. +// Currently, that's just size and the GC program. We also fill in +// the name for possible debugging use. +func funcLayout(t *rtype, rcvr *rtype) (frametype *rtype, argSize, retOffset uintptr) { + if t.Kind() != Func { + panic("reflect: funcLayout of non-func type") + } + if rcvr != nil && rcvr.Kind() == Interface { + panic("reflect: funcLayout with interface receiver " + rcvr.String()) + } + k := layoutKey{t, rcvr} + layoutCache.RLock() + if x := layoutCache.m[k]; x.t != nil { + layoutCache.RUnlock() + return x.t, x.argSize, x.retOffset + } + layoutCache.RUnlock() + layoutCache.Lock() + if x := layoutCache.m[k]; x.t != nil { + layoutCache.Unlock() + return x.t, x.argSize, x.retOffset + } + + tt := (*funcType)(unsafe.Pointer(t)) + + // compute gc program for arguments + gc := make([]uintptr, 1) // first entry is size, filled in at the end + offset := uintptr(0) + if rcvr != nil { + // Reflect uses the "interface" calling convention for + // methods, where receivers take one word of argument + // space no matter how big they actually are. + if rcvr.size > ptrSize { + // we pass a pointer to the receiver. + gc = append(gc, _GC_PTR, offset, uintptr(rcvr.gc)) + } else if rcvr.pointers() { + // rcvr is a one-word pointer object. Its gc program + // is just what we need here. 
+ gc = appendGCProgram(gc, rcvr) + } + offset += ptrSize + } + for _, arg := range tt.in { + offset = align(offset, uintptr(arg.align)) + if arg.pointers() { + gc = append(gc, _GC_REGION, offset, arg.size, uintptr(arg.gc)) + } + offset += arg.size + } + argSize = offset + if runtime.GOARCH == "amd64p32" { + offset = align(offset, 8) + } + offset = align(offset, ptrSize) + retOffset = offset + for _, res := range tt.out { + offset = align(offset, uintptr(res.align)) + if res.pointers() { + gc = append(gc, _GC_REGION, offset, res.size, uintptr(res.gc)) + } + offset += res.size + } + gc = append(gc, _GC_END) + gc[0] = offset + + // build dummy rtype holding gc program + x := new(rtype) + x.size = offset + x.gc = unsafe.Pointer(&gc[0]) + var s string + if rcvr != nil { + s = "methodargs(" + *rcvr.string + ")(" + *t.string + ")" + } else { + s = "funcargs(" + *t.string + ")" + } + x.string = &s + + // cache result for future callers + if layoutCache.m == nil { + layoutCache.m = make(map[layoutKey]layoutType) + } + layoutCache.m[k] = layoutType{ + t: x, + argSize: argSize, + retOffset: retOffset, + } + layoutCache.Unlock() + return x, argSize, retOffset +} diff --git a/src/pkg/reflect/value.go b/src/pkg/reflect/value.go index df549f5e1..576cbc398 100644 --- a/src/pkg/reflect/value.go +++ b/src/pkg/reflect/value.go @@ -62,14 +62,15 @@ type Value struct { // typ holds the type of the value represented by a Value. typ *rtype - // val holds the 1-word representation of the value. - // If flag's flagIndir bit is set, then val is a pointer to the data. - // Otherwise val is a word holding the actual data. - // When the data is smaller than a word, it begins at - // the first byte (in the memory address sense) of val. - // We use unsafe.Pointer so that the garbage collector - // knows that val could be a pointer. - val unsafe.Pointer + // Pointer-valued data or, if flagIndir is set, pointer to data. + // Valid when either flagIndir is set or typ.pointers() is true. + ptr unsafe.Pointer + + // Non-pointer-valued data. When the data is smaller + // than a word, it begins at the first byte (in the memory + // address sense) of this field. + // Valid when flagIndir is not set and typ.pointers() is false. + scalar uintptr // flag holds metadata about the value. // The lowest bits are flag bits: @@ -108,6 +109,78 @@ func (f flag) kind() Kind { return Kind((f >> flagKindShift) & flagKindMask) } +// pointer returns the underlying pointer represented by v. +// v.Kind() must be Ptr, Map, Chan, Func, or UnsafePointer +func (v Value) pointer() unsafe.Pointer { + if v.typ.size != ptrSize || !v.typ.pointers() { + panic("can't call pointer on a non-pointer Value") + } + if v.flag&flagIndir != 0 { + return *(*unsafe.Pointer)(v.ptr) + } + return v.ptr +} + +// packEface converts v to the empty interface. +func packEface(v Value) interface{} { + t := v.typ + var i interface{} + e := (*emptyInterface)(unsafe.Pointer(&i)) + // First, fill in the data portion of the interface. + switch { + case t.size > ptrSize: + // Value is indirect, and so is the interface we're making. + ptr := v.ptr + if v.flag&flagAddr != 0 { + // TODO: pass safe boolean from valueInterface so + // we don't need to copy if safe==true? + c := unsafe_New(t) + memmove(c, ptr, t.size) + ptr = c + } + e.word = iword(ptr) + case v.flag&flagIndir != 0: + // Value is indirect, but interface is direct. We need + // to load the data at v.ptr into the interface data word. 
+ if t.pointers() { + e.word = iword(*(*unsafe.Pointer)(v.ptr)) + } else { + e.word = iword(loadScalar(v.ptr, t.size)) + } + default: + // Value is direct, and so is the interface. + if t.pointers() { + e.word = iword(v.ptr) + } else { + e.word = iword(v.scalar) + } + } + // Now, fill in the type portion. We're very careful here not + // to have any operation between the e.word and e.typ assignments + // that would let the garbage collector observe the partially-built + // interface value. + e.typ = t + return i +} + +// unpackEface converts the empty interface i to a Value. +func unpackEface(i interface{}) Value { + e := (*emptyInterface)(unsafe.Pointer(&i)) + // NOTE: don't read e.word until we know whether it is really a pointer or not. + t := e.typ + if t == nil { + return Value{} + } + f := flag(t.Kind()) << flagKindShift + if t.size > ptrSize { + return Value{t, unsafe.Pointer(e.word), 0, f | flagIndir} + } + if t.pointers() { + return Value{t, unsafe.Pointer(e.word), 0, f} + } + return Value{t, nil, uintptr(e.word), f} +} + // A ValueError occurs when a Value method is invoked on // a Value that does not support it. Such cases are documented // in the description of each method. @@ -139,28 +212,21 @@ func methodName() string { // bigger than a pointer, its word is a pointer to v's data. // Otherwise, its word holds the data stored // in its leading bytes (so is not a pointer). -// Because the value sometimes holds a pointer, we use -// unsafe.Pointer to represent it, so that if iword appears -// in a struct, the garbage collector knows that might be -// a pointer. +// This type is very dangerous for the garbage collector because +// it must be treated conservatively. We try to never expose it +// to the GC here so that GC remains precise. type iword unsafe.Pointer -func (v Value) iword() iword { - if v.flag&flagIndir != 0 && v.typ.size <= ptrSize { - // Have indirect but want direct word. - return loadIword(v.val, v.typ.size) - } - return iword(v.val) -} - -// loadIword loads n bytes at p from memory into an iword. -func loadIword(p unsafe.Pointer, n uintptr) iword { +// loadScalar loads n bytes at p from memory into a uintptr +// that forms the second word of an interface. The data +// must be non-pointer in nature. +func loadScalar(p unsafe.Pointer, n uintptr) uintptr { // Run the copy ourselves instead of calling memmove // to avoid moving w to the heap. - var w iword + var w uintptr switch n { default: - panic("reflect: internal error: loadIword of " + strconv.Itoa(int(n)) + "-byte value") + panic("reflect: internal error: loadScalar of " + strconv.Itoa(int(n)) + "-byte value") case 0: case 1: *(*uint8)(unsafe.Pointer(&w)) = *(*uint8)(p) @@ -182,13 +248,13 @@ func loadIword(p unsafe.Pointer, n uintptr) iword { return w } -// storeIword stores n bytes from w into p. -func storeIword(p unsafe.Pointer, w iword, n uintptr) { +// storeScalar stores n bytes from w into p. +func storeScalar(p unsafe.Pointer, w uintptr, n uintptr) { // Run the copy ourselves instead of calling memmove // to avoid moving w to the heap. 
switch n { default: - panic("reflect: internal error: storeIword of " + strconv.Itoa(int(n)) + "-byte value") + panic("reflect: internal error: storeScalar of " + strconv.Itoa(int(n)) + "-byte value") case 0: case 1: *(*uint8)(p) = *(*uint8)(unsafe.Pointer(&w)) @@ -278,7 +344,7 @@ func (v Value) Addr() Value { if v.flag&flagAddr == 0 { panic("reflect.Value.Addr of unaddressable value") } - return Value{v.typ.ptrTo(), v.val, (v.flag & flagRO) | flag(Ptr)<<flagKindShift} + return Value{v.typ.ptrTo(), v.ptr, 0, (v.flag & flagRO) | flag(Ptr)<<flagKindShift} } // Bool returns v's underlying value. @@ -286,9 +352,9 @@ func (v Value) Addr() Value { func (v Value) Bool() bool { v.mustBe(Bool) if v.flag&flagIndir != 0 { - return *(*bool)(v.val) + return *(*bool)(v.ptr) } - return *(*bool)(unsafe.Pointer(&v.val)) + return *(*bool)(unsafe.Pointer(&v.scalar)) } // Bytes returns v's underlying value. @@ -299,7 +365,7 @@ func (v Value) Bytes() []byte { panic("reflect.Value.Bytes of non-byte slice") } // Slice is always bigger than a word; assume flagIndir. - return *(*[]byte)(v.val) + return *(*[]byte)(v.ptr) } // runes returns v's underlying value. @@ -310,7 +376,7 @@ func (v Value) runes() []rune { panic("reflect.Value.Bytes of non-rune slice") } // Slice is always bigger than a word; assume flagIndir. - return *(*[]rune)(v.val) + return *(*[]rune)(v.ptr) } // CanAddr returns true if the value's address can be obtained with Addr. @@ -358,19 +424,28 @@ func (v Value) CallSlice(in []Value) []Value { return v.call("CallSlice", in) } +var callGC bool // for testing; see TestCallMethodJump + +var makeFuncStubFn = makeFuncStub +var makeFuncStubCode = **(**uintptr)(unsafe.Pointer(&makeFuncStubFn)) +var methodValueCallFn = methodValueCall +var methodValueCallCode = **(**uintptr)(unsafe.Pointer(&methodValueCallFn)) + func (v Value) call(op string, in []Value) []Value { // Get function pointer, type. t := v.typ var ( - fn unsafe.Pointer - rcvr iword + fn unsafe.Pointer + rcvr Value + rcvrtype *rtype ) if v.flag&flagMethod != 0 { - t, fn, rcvr = methodReceiver(op, v, int(v.flag)>>flagMethodShift) + rcvr = v + rcvrtype, t, fn = methodReceiver(op, v, int(v.flag)>>flagMethodShift) } else if v.flag&flagIndir != 0 { - fn = *(*unsafe.Pointer)(v.val) + fn = *(*unsafe.Pointer)(v.ptr) } else { - fn = v.val + fn = v.ptr } if fn == nil { @@ -434,23 +509,36 @@ func (v Value) call(op string, in []Value) []Value { } nout := t.NumOut() - // Compute arg size & allocate. - // This computation is 5g/6g/8g-dependent - // and probably wrong for gccgo, but so - // is most of this function. - size, _, _, _ := frameSize(t, v.flag&flagMethod != 0) - - // Copy into args. - // - // TODO(rsc): This will need to be updated for any new garbage collector. - // For now make everything look like a pointer by allocating - // a []unsafe.Pointer. - args := make([]unsafe.Pointer, size/ptrSize) - ptr := unsafe.Pointer(&args[0]) + // If target is makeFuncStub, short circuit the unpack onto stack / + // pack back into []Value for the args and return values. Just do the + // call directly. + // We need to do this here because otherwise we have a situation where + // reflect.callXX calls makeFuncStub, neither of which knows the + // layout of the args. That's bad for precise gc & stack copying. + x := (*makeFuncImpl)(fn) + if x.code == makeFuncStubCode { + return x.fn(in) + } + + // If the target is methodValueCall, do its work here: add the receiver + // argument and call the real target directly. 
+ // We need to do this here because otherwise we have a situation where + // reflect.callXX calls methodValueCall, neither of which knows the + // layout of the args. That's bad for precise gc & stack copying. + y := (*methodValue)(fn) + if y.fn == methodValueCallCode { + rcvr = y.rcvr + rcvrtype, t, fn = methodReceiver("call", rcvr, y.method) + } + + // Compute frame type, allocate a chunk of memory for frame + frametype, _, retOffset := funcLayout(t, rcvrtype) + args := unsafe_New(frametype) off := uintptr(0) - if v.flag&flagMethod != 0 { - // Hard-wired first argument. - *(*iword)(ptr) = rcvr + + // Copy inputs into args. + if rcvrtype != nil { + storeRcvr(rcvr, args) off = ptrSize } for i, v := range in { @@ -459,30 +547,35 @@ func (v Value) call(op string, in []Value) []Value { a := uintptr(targ.align) off = (off + a - 1) &^ (a - 1) n := targ.size - addr := unsafe.Pointer(uintptr(ptr) + off) + addr := unsafe.Pointer(uintptr(args) + off) v = v.assignTo("reflect.Value.Call", targ, (*interface{})(addr)) - if v.flag&flagIndir == 0 { - storeIword(addr, iword(v.val), n) + if v.flag&flagIndir != 0 { + memmove(addr, v.ptr, n) + } else if targ.pointers() { + *(*unsafe.Pointer)(addr) = v.ptr } else { - memmove(addr, v.val, n) + storeScalar(addr, v.scalar, n) } off += n } - off = (off + ptrSize - 1) &^ (ptrSize - 1) // Call. - call(fn, ptr, uint32(size)) + call(fn, args, uint32(frametype.size), uint32(retOffset)) + + // For testing; see TestCallMethodJump. + if callGC { + runtime.GC() + } // Copy return values out of args. - // - // TODO(rsc): revisit like above. ret := make([]Value, nout) + off = retOffset for i := 0; i < nout; i++ { tv := t.Out(i) a := uintptr(tv.Align()) off = (off + a - 1) &^ (a - 1) fl := flagIndir | flag(tv.Kind())<<flagKindShift - ret[i] = Value{tv.common(), unsafe.Pointer(uintptr(ptr) + off), fl} + ret[i] = Value{tv.common(), unsafe.Pointer(uintptr(args) + off), 0, fl} off += tv.Size() } @@ -512,18 +605,20 @@ func callReflect(ctxt *makeFuncImpl, frame unsafe.Pointer) { for _, arg := range ftyp.in { typ := arg off += -off & uintptr(typ.align-1) - v := Value{typ, nil, flag(typ.Kind()) << flagKindShift} - if typ.size <= ptrSize { - // value fits in word. - v.val = unsafe.Pointer(loadIword(unsafe.Pointer(uintptr(ptr)+off), typ.size)) - } else { + addr := unsafe.Pointer(uintptr(ptr) + off) + v := Value{typ, nil, 0, flag(typ.Kind()) << flagKindShift} + if typ.size > ptrSize { // value does not fit in word. // Must make a copy, because f might keep a reference to it, // and we cannot let f keep a reference to the stack frame // after this function returns, not even a read-only reference. - v.val = unsafe_New(typ) - memmove(v.val, unsafe.Pointer(uintptr(ptr)+off), typ.size) + v.ptr = unsafe_New(typ) + memmove(v.ptr, addr, typ.size) v.flag |= flagIndir + } else if typ.pointers() { + v.ptr = *(*unsafe.Pointer)(addr) + } else { + v.scalar = loadScalar(addr, typ.size) } in = append(in, v) off += typ.size @@ -538,6 +633,9 @@ func callReflect(ctxt *makeFuncImpl, frame unsafe.Pointer) { // Copy results back into argument frame. 
if len(ftyp.out) > 0 { off += -off & (ptrSize - 1) + if runtime.GOARCH == "amd64p32" { + off = align(off, 8) + } for i, arg := range ftyp.out { typ := arg v := out[i] @@ -552,10 +650,12 @@ func callReflect(ctxt *makeFuncImpl, frame unsafe.Pointer) { } off += -off & uintptr(typ.align-1) addr := unsafe.Pointer(uintptr(ptr) + off) - if v.flag&flagIndir == 0 { - storeIword(addr, iword(v.val), typ.size) + if v.flag&flagIndir != 0 { + memmove(addr, v.ptr, typ.size) + } else if typ.pointers() { + *(*unsafe.Pointer)(addr) = v.ptr } else { - memmove(addr, v.val, typ.size) + storeScalar(addr, v.scalar, typ.size) } off += typ.size } @@ -566,7 +666,10 @@ func callReflect(ctxt *makeFuncImpl, frame unsafe.Pointer) { // described by v. The Value v may or may not have the // flagMethod bit set, so the kind cached in v.flag should // not be used. -func methodReceiver(op string, v Value, methodIndex int) (t *rtype, fn unsafe.Pointer, rcvr iword) { +// The return value rcvrtype gives the method's actual receiver type. +// The return value t gives the method type signature (without the receiver). +// The return value fn is a pointer to the method code. +func methodReceiver(op string, v Value, methodIndex int) (rcvrtype, t *rtype, fn unsafe.Pointer) { i := methodIndex if v.typ.Kind() == Interface { tt := (*interfaceType)(unsafe.Pointer(v.typ)) @@ -577,14 +680,15 @@ func methodReceiver(op string, v Value, methodIndex int) (t *rtype, fn unsafe.Po if m.pkgPath != nil { panic("reflect: " + op + " of unexported method") } - t = m.typ - iface := (*nonEmptyInterface)(v.val) + iface := (*nonEmptyInterface)(v.ptr) if iface.itab == nil { panic("reflect: " + op + " of method on nil interface value") } + rcvrtype = iface.itab.typ fn = unsafe.Pointer(&iface.itab.fun[i]) - rcvr = iface.word + t = m.typ } else { + rcvrtype = v.typ ut := v.typ.uncommon() if ut == nil || i < 0 || i >= len(ut.methods) { panic("reflect: internal error: invalid method index") @@ -595,58 +699,41 @@ func methodReceiver(op string, v Value, methodIndex int) (t *rtype, fn unsafe.Po } fn = unsafe.Pointer(&m.ifn) t = m.mtyp - rcvr = v.iword() } return } +// v is a method receiver. Store at p the word which is used to +// encode that receiver at the start of the argument list. +// Reflect uses the "interface" calling convention for +// methods, which always uses one word to record the receiver. +func storeRcvr(v Value, p unsafe.Pointer) { + t := v.typ + if t.Kind() == Interface { + // the interface data word becomes the receiver word + iface := (*nonEmptyInterface)(v.ptr) + *(*unsafe.Pointer)(p) = unsafe.Pointer(iface.word) + } else if v.flag&flagIndir != 0 { + if t.size > ptrSize { + *(*unsafe.Pointer)(p) = v.ptr + } else if t.pointers() { + *(*unsafe.Pointer)(p) = *(*unsafe.Pointer)(v.ptr) + } else { + *(*uintptr)(p) = loadScalar(v.ptr, t.size) + } + } else if t.pointers() { + *(*unsafe.Pointer)(p) = v.ptr + } else { + *(*uintptr)(p) = v.scalar + } +} + // align returns the result of rounding x up to a multiple of n. // n must be a power of two. func align(x, n uintptr) uintptr { return (x + n - 1) &^ (n - 1) } -// frameSize returns the sizes of the argument and result frame -// for a function of the given type. The rcvr bool specifies whether -// a one-word receiver should be included in the total. 
-func frameSize(t *rtype, rcvr bool) (total, in, outOffset, out uintptr) { - if rcvr { - // extra word for receiver interface word - total += ptrSize - } - - nin := t.NumIn() - in = -total - for i := 0; i < nin; i++ { - tv := t.In(i) - total = align(total, uintptr(tv.Align())) - total += tv.Size() - } - in += total - total = align(total, ptrSize) - nout := t.NumOut() - outOffset = total - out = -total - for i := 0; i < nout; i++ { - tv := t.Out(i) - total = align(total, uintptr(tv.Align())) - total += tv.Size() - } - out += total - - // total must be > 0 in order for &args[0] to be valid. - // the argument copying is going to round it up to - // a multiple of ptrSize anyway, so make it ptrSize to begin with. - if total < ptrSize { - total = ptrSize - } - - // round to pointer - total = align(total, ptrSize) - - return -} - // callMethod is the call implementation used by a function returned // by makeMethodValue (used by v.Method(i).Interface()). // It is a streamlined version of the usual reflect call: the caller has @@ -659,24 +746,31 @@ func frameSize(t *rtype, rcvr bool) (total, in, outOffset, out uintptr) { // so that the linker can make it work correctly for panic and recover. // The gc compilers know to do that for the name "reflect.callMethod". func callMethod(ctxt *methodValue, frame unsafe.Pointer) { - t, fn, rcvr := methodReceiver("call", ctxt.rcvr, ctxt.method) - total, in, outOffset, out := frameSize(t, true) - - // Copy into args. - // - // TODO(rsc): This will need to be updated for any new garbage collector. - // For now make everything look like a pointer by allocating - // a []unsafe.Pointer. - args := make([]unsafe.Pointer, total/ptrSize) - args[0] = unsafe.Pointer(rcvr) - base := unsafe.Pointer(&args[0]) - memmove(unsafe.Pointer(uintptr(base)+ptrSize), frame, in) + rcvr := ctxt.rcvr + rcvrtype, t, fn := methodReceiver("call", rcvr, ctxt.method) + frametype, argSize, retOffset := funcLayout(t, rcvrtype) + + // Make a new frame that is one word bigger so we can store the receiver. + args := unsafe_New(frametype) + + // Copy in receiver and rest of args. + storeRcvr(rcvr, args) + memmove(unsafe.Pointer(uintptr(args)+ptrSize), frame, argSize-ptrSize) // Call. - call(fn, unsafe.Pointer(&args[0]), uint32(total)) + call(fn, args, uint32(frametype.size), uint32(retOffset)) - // Copy return values. - memmove(unsafe.Pointer(uintptr(frame)+outOffset-ptrSize), unsafe.Pointer(uintptr(base)+outOffset), out) + // Copy return values. On amd64p32, the beginning of return values + // is 64-bit aligned, so the caller's frame layout (which doesn't have + // a receiver) is different from the layout of the fn call, which has + // a receiver. + // Ignore any changes to args and just copy return values. + callerRetOffset := retOffset - ptrSize + if runtime.GOARCH == "amd64p32" { + callerRetOffset = align(argSize-ptrSize, 8) + } + memmove(unsafe.Pointer(uintptr(frame)+callerRetOffset), + unsafe.Pointer(uintptr(args)+retOffset), frametype.size-retOffset) } // funcName returns the name of f, for use in error messages. @@ -697,10 +791,10 @@ func (v Value) Cap() int { case Array: return v.typ.Len() case Chan: - return int(chancap(v.iword())) + return int(chancap(v.pointer())) case Slice: // Slice is always bigger than a word; assume flagIndir. 
- return (*SliceHeader)(v.val).Cap + return (*sliceHeader)(v.ptr).Cap } panic(&ValueError{"reflect.Value.Cap", k}) } @@ -710,7 +804,7 @@ func (v Value) Cap() int { func (v Value) Close() { v.mustBe(Chan) v.mustBeExported() - chanclose(v.iword()) + chanclose(v.pointer()) } // Complex returns v's underlying value, as a complex128. @@ -720,12 +814,12 @@ func (v Value) Complex() complex128 { switch k { case Complex64: if v.flag&flagIndir != 0 { - return complex128(*(*complex64)(v.val)) + return complex128(*(*complex64)(v.ptr)) } - return complex128(*(*complex64)(unsafe.Pointer(&v.val))) + return complex128(*(*complex64)(unsafe.Pointer(&v.scalar))) case Complex128: // complex128 is always bigger than a word; assume flagIndir. - return *(*complex128)(v.val) + return *(*complex128)(v.ptr) } panic(&ValueError{"reflect.Value.Complex", k}) } @@ -738,48 +832,31 @@ func (v Value) Elem() Value { k := v.kind() switch k { case Interface: - var ( - typ *rtype - val unsafe.Pointer - ) + var eface interface{} if v.typ.NumMethod() == 0 { - eface := (*emptyInterface)(v.val) - if eface.typ == nil { - // nil interface value - return Value{} - } - typ = eface.typ - val = unsafe.Pointer(eface.word) + eface = *(*interface{})(v.ptr) } else { - iface := (*nonEmptyInterface)(v.val) - if iface.itab == nil { - // nil interface value - return Value{} - } - typ = iface.itab.typ - val = unsafe.Pointer(iface.word) - } - fl := v.flag & flagRO - fl |= flag(typ.Kind()) << flagKindShift - if typ.size > ptrSize { - fl |= flagIndir + eface = (interface{})(*(*interface { + M() + })(v.ptr)) } - return Value{typ, val, fl} - + x := unpackEface(eface) + x.flag |= v.flag & flagRO + return x case Ptr: - val := v.val + ptr := v.ptr if v.flag&flagIndir != 0 { - val = *(*unsafe.Pointer)(val) + ptr = *(*unsafe.Pointer)(ptr) } // The returned value's address is v's value. - if val == nil { + if ptr == nil { return Value{} } tt := (*ptrType)(unsafe.Pointer(v.typ)) typ := tt.elem fl := v.flag&flagRO | flagIndir | flagAddr fl |= flag(typ.Kind() << flagKindShift) - return Value{typ, val, fl} + return Value{typ, ptr, 0, fl} } panic(&ValueError{"reflect.Value.Elem", k}) } @@ -803,20 +880,26 @@ func (v Value) Field(i int) Value { } fl |= flag(typ.Kind()) << flagKindShift - var val unsafe.Pointer + var ptr unsafe.Pointer + var scalar uintptr switch { case fl&flagIndir != 0: // Indirect. Just bump pointer. - val = unsafe.Pointer(uintptr(v.val) + field.offset) + ptr = unsafe.Pointer(uintptr(v.ptr) + field.offset) + case typ.pointers(): + if field.offset != 0 { + panic("field access of ptr value isn't at offset 0") + } + ptr = v.ptr case bigEndian: - // Direct. Discard leading bytes. - val = unsafe.Pointer(uintptr(v.val) << (field.offset * 8)) + // Must be scalar. Discard leading bytes. + scalar = v.scalar << (field.offset * 8) default: - // Direct. Discard leading bytes. - val = unsafe.Pointer(uintptr(v.val) >> (field.offset * 8)) + // Must be scalar. Discard leading bytes. + scalar = v.scalar >> (field.offset * 8) } - return Value{typ, val, fl} + return Value{typ, ptr, scalar, fl} } // FieldByIndex returns the nested field corresponding to index. 
@@ -825,7 +908,10 @@ func (v Value) FieldByIndex(index []int) Value { v.mustBe(Struct) for i, x := range index { if i > 0 { - if v.Kind() == Ptr && v.Elem().Kind() == Struct { + if v.Kind() == Ptr && v.typ.Elem().Kind() == Struct { + if v.IsNil() { + panic("reflect: indirection through nil pointer to embedded struct") + } v = v.Elem() } } @@ -864,14 +950,14 @@ func (v Value) Float() float64 { switch k { case Float32: if v.flag&flagIndir != 0 { - return float64(*(*float32)(v.val)) + return float64(*(*float32)(v.ptr)) } - return float64(*(*float32)(unsafe.Pointer(&v.val))) + return float64(*(*float32)(unsafe.Pointer(&v.scalar))) case Float64: if v.flag&flagIndir != 0 { - return *(*float64)(v.val) + return *(*float64)(v.ptr) } - return *(*float64)(unsafe.Pointer(&v.val)) + return *(*float64)(unsafe.Pointer(&v.scalar)) } panic(&ValueError{"reflect.Value.Float", k}) } @@ -894,41 +980,48 @@ func (v Value) Index(i int) Value { offset := uintptr(i) * typ.size var val unsafe.Pointer + var scalar uintptr switch { case fl&flagIndir != 0: // Indirect. Just bump pointer. - val = unsafe.Pointer(uintptr(v.val) + offset) + val = unsafe.Pointer(uintptr(v.ptr) + offset) + case typ.pointers(): + if offset != 0 { + panic("can't Index(i) with i!=0 on ptrLike value") + } + val = v.ptr case bigEndian: // Direct. Discard leading bytes. - val = unsafe.Pointer(uintptr(v.val) << (offset * 8)) + scalar = v.scalar << (offset * 8) default: // Direct. Discard leading bytes. - val = unsafe.Pointer(uintptr(v.val) >> (offset * 8)) + scalar = v.scalar >> (offset * 8) } - return Value{typ, val, fl} + return Value{typ, val, scalar, fl} case Slice: // Element flag same as Elem of Ptr. // Addressable, indirect, possibly read-only. fl := flagAddr | flagIndir | v.flag&flagRO - s := (*SliceHeader)(v.val) + s := (*sliceHeader)(v.ptr) if i < 0 || i >= s.Len { panic("reflect: slice index out of range") } tt := (*sliceType)(unsafe.Pointer(v.typ)) typ := tt.elem fl |= flag(typ.Kind()) << flagKindShift - val := unsafe.Pointer(s.Data + uintptr(i)*typ.size) - return Value{typ, val, fl} + val := unsafe.Pointer(uintptr(s.Data) + uintptr(i)*typ.size) + return Value{typ, val, 0, fl} case String: fl := v.flag&flagRO | flag(Uint8<<flagKindShift) - s := (*StringHeader)(v.val) + s := (*stringHeader)(v.ptr) if i < 0 || i >= s.Len { panic("reflect: string index out of range") } - val := *(*byte)(unsafe.Pointer(s.Data + uintptr(i))) - return Value{uint8Type, unsafe.Pointer(uintptr(val)), fl} + b := uintptr(0) + *(*byte)(unsafe.Pointer(&b)) = *(*byte)(unsafe.Pointer(uintptr(s.Data) + uintptr(i))) + return Value{uint8Type, nil, b, fl} } panic(&ValueError{"reflect.Value.Index", k}) } @@ -939,11 +1032,11 @@ func (v Value) Int() int64 { k := v.kind() var p unsafe.Pointer if v.flag&flagIndir != 0 { - p = v.val + p = v.ptr } else { - // The escape analysis is good enough that &v.val + // The escape analysis is good enough that &v.scalar // does not trigger a heap allocation. - p = unsafe.Pointer(&v.val) + p = unsafe.Pointer(&v.scalar) } switch k { case Int: @@ -991,51 +1084,42 @@ func valueInterface(v Value, safe bool) interface{} { v = makeMethodValue("Interface", v) } - k := v.kind() - if k == Interface { + if v.kind() == Interface { // Special case: return the element inside the interface. // Empty interface has one layout, all interfaces with // methods have a second layout. if v.NumMethod() == 0 { - return *(*interface{})(v.val) + return *(*interface{})(v.ptr) } return *(*interface { M() - })(v.val) - } - - // Non-interface value. 
- var eface emptyInterface - eface.typ = v.typ - eface.word = v.iword() - - // Don't need to allocate if v is not addressable or fits in one word. - if v.flag&flagAddr != 0 && v.typ.size > ptrSize { - // eface.word is a pointer to the actual data, - // which might be changed. We need to return - // a pointer to unchanging data, so make a copy. - ptr := unsafe_New(v.typ) - memmove(ptr, unsafe.Pointer(eface.word), v.typ.size) - eface.word = iword(ptr) + })(v.ptr) } - return *(*interface{})(unsafe.Pointer(&eface)) + // TODO: pass safe to packEface so we don't need to copy if safe==true? + return packEface(v) } // InterfaceData returns the interface v's value as a uintptr pair. // It panics if v's Kind is not Interface. func (v Value) InterfaceData() [2]uintptr { + // TODO: deprecate this v.mustBe(Interface) // We treat this as a read operation, so we allow // it even for unexported data, because the caller // has to import "unsafe" to turn it into something // that can be abused. // Interface value is always bigger than a word; assume flagIndir. - return *(*[2]uintptr)(v.val) + return *(*[2]uintptr)(v.ptr) } -// IsNil returns true if v is a nil value. -// It panics if v's Kind is not Chan, Func, Interface, Map, Ptr, or Slice. +// IsNil reports whether its argument v is nil. The argument must be +// a chan, func, interface, map, pointer, or slice value; if it is +// not, IsNil panics. Note that IsNil is not always equivalent to a +// regular comparison with nil in Go. For example, if v was created +// by calling ValueOf with an uninitialized interface variable i, +// i==nil will be true but v.IsNil will panic as v will be the zero +// Value. func (v Value) IsNil() bool { k := v.kind() switch k { @@ -1043,7 +1127,7 @@ func (v Value) IsNil() bool { if v.flag&flagMethod != 0 { return false } - ptr := v.val + ptr := v.ptr if v.flag&flagIndir != 0 { ptr = *(*unsafe.Pointer)(ptr) } @@ -1051,7 +1135,7 @@ func (v Value) IsNil() bool { case Interface, Slice: // Both interface and slice are nil if first word is 0. // Both are always bigger than a word; assume flagIndir. - return *(*unsafe.Pointer)(v.val) == nil + return *(*unsafe.Pointer)(v.ptr) == nil } panic(&ValueError{"reflect.Value.IsNil", k}) } @@ -1080,15 +1164,15 @@ func (v Value) Len() int { tt := (*arrayType)(unsafe.Pointer(v.typ)) return int(tt.len) case Chan: - return chanlen(v.iword()) + return chanlen(v.pointer()) case Map: - return maplen(v.iword()) + return maplen(v.pointer()) case Slice: // Slice is bigger than a word; assume flagIndir. - return (*SliceHeader)(v.val).Len + return (*sliceHeader)(v.ptr).Len case String: // String is bigger than a word; assume flagIndir. - return (*StringHeader)(v.val).Len + return (*stringHeader)(v.ptr).Len } panic(&ValueError{"reflect.Value.Len", k}) } @@ -1110,17 +1194,32 @@ func (v Value) MapIndex(key Value) Value { // of unexported fields. key = key.assignTo("reflect.Value.MapIndex", tt.key, nil) - word, ok := mapaccess(v.typ, v.iword(), key.iword()) - if !ok { + var k unsafe.Pointer + if key.flag&flagIndir != 0 { + k = key.ptr + } else if key.typ.pointers() { + k = unsafe.Pointer(&key.ptr) + } else { + k = unsafe.Pointer(&key.scalar) + } + e := mapaccess(v.typ, v.pointer(), k) + if e == nil { return Value{} } typ := tt.elem fl := (v.flag | key.flag) & flagRO + fl |= flag(typ.Kind()) << flagKindShift if typ.size > ptrSize { - fl |= flagIndir + // Copy result so future changes to the map + // won't change the underlying value. 
+ c := unsafe_New(typ) + memmove(c, e, typ.size) + return Value{typ, c, 0, fl | flagIndir} + } else if typ.pointers() { + return Value{typ, *(*unsafe.Pointer)(e), 0, fl} + } else { + return Value{typ, nil, loadScalar(e, typ.size), fl} } - fl |= flag(typ.Kind()) << flagKindShift - return Value{typ, unsafe.Pointer(word), fl} } // MapKeys returns a slice containing all the keys present in the map, @@ -1132,13 +1231,9 @@ func (v Value) MapKeys() []Value { tt := (*mapType)(unsafe.Pointer(v.typ)) keyType := tt.key - fl := v.flag & flagRO - fl |= flag(keyType.Kind()) << flagKindShift - if keyType.size > ptrSize { - fl |= flagIndir - } + fl := v.flag&flagRO | flag(keyType.Kind())<<flagKindShift - m := v.iword() + m := v.pointer() mlen := int(0) if m != nil { mlen = maplen(m) @@ -1147,11 +1242,24 @@ func (v Value) MapKeys() []Value { a := make([]Value, mlen) var i int for i = 0; i < len(a); i++ { - keyWord, ok := mapiterkey(it) - if !ok { + key := mapiterkey(it) + if key == nil { + // Someone deleted an entry from the map since we + // called maplen above. It's a data race, but nothing + // we can do about it. break } - a[i] = Value{keyType, unsafe.Pointer(keyWord), fl} + if keyType.size > ptrSize { + // Copy result so future changes to the map + // won't change the underlying value. + c := unsafe_New(keyType) + memmove(c, key, keyType.size) + a[i] = Value{keyType, c, 0, fl | flagIndir} + } else if keyType.pointers() { + a[i] = Value{keyType, *(*unsafe.Pointer)(key), 0, fl} + } else { + a[i] = Value{keyType, nil, loadScalar(key, keyType.size), fl} + } mapiternext(it) } return a[:i] @@ -1174,7 +1282,7 @@ func (v Value) Method(i int) Value { fl := v.flag & (flagRO | flagIndir) fl |= flag(Func) << flagKindShift fl |= flag(i)<<flagMethodShift | flagMethod - return Value{v.typ, v.val, fl} + return Value{v.typ, v.ptr, v.scalar, fl} } // NumMethod returns the number of methods in the value's method set. @@ -1284,15 +1392,16 @@ func (v Value) OverflowUint(x uint64) bool { // code pointer, but not necessarily enough to identify a // single function uniquely. The only guarantee is that the // result is zero if and only if v is a nil func Value. +// +// If v's Kind is Slice, the returned pointer is to the first +// element of the slice. If the slice is nil the returned value +// is 0. If the slice is empty but non-nil the return value is non-zero. func (v Value) Pointer() uintptr { + // TODO: deprecate k := v.kind() switch k { case Chan, Map, Ptr, UnsafePointer: - p := v.val - if v.flag&flagIndir != 0 { - p = *(*unsafe.Pointer)(p) - } - return uintptr(p) + return uintptr(v.pointer()) case Func: if v.flag&flagMethod != 0 { // As the doc comment says, the returned pointer is an @@ -1304,10 +1413,7 @@ func (v Value) Pointer() uintptr { f := methodValueCall return **(**uintptr)(unsafe.Pointer(&f)) } - p := v.val - if v.flag&flagIndir != 0 { - p = *(*unsafe.Pointer)(p) - } + p := v.pointer() // Non-nil func value points at data block. // First word of data block is actual code. 
if p != nil { @@ -1316,7 +1422,7 @@ func (v Value) Pointer() uintptr { return uintptr(p) case Slice: - return (*SliceHeader)(v.val).Data + return (*SliceHeader)(v.ptr).Data } panic(&ValueError{"reflect.Value.Pointer", k}) } @@ -1339,14 +1445,21 @@ func (v Value) recv(nb bool) (val Value, ok bool) { if ChanDir(tt.dir)&RecvDir == 0 { panic("reflect: recv on send-only channel") } - word, selected, ok := chanrecv(v.typ, v.iword(), nb) - if selected { - typ := tt.elem - fl := flag(typ.Kind()) << flagKindShift - if typ.size > ptrSize { - fl |= flagIndir - } - val = Value{typ, unsafe.Pointer(word), fl} + t := tt.elem + val = Value{t, nil, 0, flag(t.Kind()) << flagKindShift} + var p unsafe.Pointer + if t.size > ptrSize { + p = unsafe_New(t) + val.ptr = p + val.flag |= flagIndir + } else if t.pointers() { + p = unsafe.Pointer(&val.ptr) + } else { + p = unsafe.Pointer(&val.scalar) + } + selected, ok := chanrecv(v.typ, v.pointer(), nb, p) + if !selected { + val = Value{} } return } @@ -1369,7 +1482,15 @@ func (v Value) send(x Value, nb bool) (selected bool) { } x.mustBeExported() x = x.assignTo("reflect.Value.Send", tt.elem, nil) - return chansend(v.typ, v.iword(), x.iword(), nb) + var p unsafe.Pointer + if x.flag&flagIndir != 0 { + p = x.ptr + } else if x.typ.pointers() { + p = unsafe.Pointer(&x.ptr) + } else { + p = unsafe.Pointer(&x.scalar) + } + return chansend(v.typ, v.pointer(), p, nb) } // Set assigns x to the value v. @@ -1380,13 +1501,15 @@ func (v Value) Set(x Value) { x.mustBeExported() // do not let unexported x leak var target *interface{} if v.kind() == Interface { - target = (*interface{})(v.val) + target = (*interface{})(v.ptr) } x = x.assignTo("reflect.Set", v.typ, target) if x.flag&flagIndir != 0 { - memmove(v.val, x.val, v.typ.size) + memmove(v.ptr, x.ptr, v.typ.size) + } else if x.typ.pointers() { + *(*unsafe.Pointer)(v.ptr) = x.ptr } else { - storeIword(v.val, iword(x.val), v.typ.size) + memmove(v.ptr, unsafe.Pointer(&x.scalar), v.typ.size) } } @@ -1395,7 +1518,7 @@ func (v Value) Set(x Value) { func (v Value) SetBool(x bool) { v.mustBeAssignable() v.mustBe(Bool) - *(*bool)(v.val) = x + *(*bool)(v.ptr) = x } // SetBytes sets v's underlying value. @@ -1406,7 +1529,7 @@ func (v Value) SetBytes(x []byte) { if v.typ.Elem().Kind() != Uint8 { panic("reflect.Value.SetBytes of non-byte slice") } - *(*[]byte)(v.val) = x + *(*[]byte)(v.ptr) = x } // setRunes sets v's underlying value. @@ -1417,7 +1540,7 @@ func (v Value) setRunes(x []rune) { if v.typ.Elem().Kind() != Int32 { panic("reflect.Value.setRunes of non-rune slice") } - *(*[]rune)(v.val) = x + *(*[]rune)(v.ptr) = x } // SetComplex sets v's underlying value to x. 
@@ -1428,9 +1551,9 @@ func (v Value) SetComplex(x complex128) { default: panic(&ValueError{"reflect.Value.SetComplex", k}) case Complex64: - *(*complex64)(v.val) = complex64(x) + *(*complex64)(v.ptr) = complex64(x) case Complex128: - *(*complex128)(v.val) = x + *(*complex128)(v.ptr) = x } } @@ -1442,9 +1565,9 @@ func (v Value) SetFloat(x float64) { default: panic(&ValueError{"reflect.Value.SetFloat", k}) case Float32: - *(*float32)(v.val) = float32(x) + *(*float32)(v.ptr) = float32(x) case Float64: - *(*float64)(v.val) = x + *(*float64)(v.ptr) = x } } @@ -1456,15 +1579,15 @@ func (v Value) SetInt(x int64) { default: panic(&ValueError{"reflect.Value.SetInt", k}) case Int: - *(*int)(v.val) = int(x) + *(*int)(v.ptr) = int(x) case Int8: - *(*int8)(v.val) = int8(x) + *(*int8)(v.ptr) = int8(x) case Int16: - *(*int16)(v.val) = int16(x) + *(*int16)(v.ptr) = int16(x) case Int32: - *(*int32)(v.val) = int32(x) + *(*int32)(v.ptr) = int32(x) case Int64: - *(*int64)(v.val) = x + *(*int64)(v.ptr) = x } } @@ -1474,7 +1597,7 @@ func (v Value) SetInt(x int64) { func (v Value) SetLen(n int) { v.mustBeAssignable() v.mustBe(Slice) - s := (*SliceHeader)(v.val) + s := (*sliceHeader)(v.ptr) if n < 0 || n > int(s.Cap) { panic("reflect: slice length out of range in SetLen") } @@ -1487,7 +1610,7 @@ func (v Value) SetLen(n int) { func (v Value) SetCap(n int) { v.mustBeAssignable() v.mustBe(Slice) - s := (*SliceHeader)(v.val) + s := (*sliceHeader)(v.ptr) if n < int(s.Len) || n > int(s.Cap) { panic("reflect: slice capacity out of range in SetCap") } @@ -1497,6 +1620,7 @@ func (v Value) SetCap(n int) { // SetMapIndex sets the value associated with key in the map v to val. // It panics if v's Kind is not Map. // If val is the zero Value, SetMapIndex deletes the key from the map. +// Otherwise if v holds a nil map, SetMapIndex will panic. // As in Go, key's value must be assignable to the map's key type, // and val's value must be assignable to the map's value type. func (v Value) SetMapIndex(key, val Value) { @@ -1505,11 +1629,29 @@ func (v Value) SetMapIndex(key, val Value) { key.mustBeExported() tt := (*mapType)(unsafe.Pointer(v.typ)) key = key.assignTo("reflect.Value.SetMapIndex", tt.key, nil) - if val.typ != nil { - val.mustBeExported() - val = val.assignTo("reflect.Value.SetMapIndex", tt.elem, nil) + var k unsafe.Pointer + if key.flag&flagIndir != 0 { + k = key.ptr + } else if key.typ.pointers() { + k = unsafe.Pointer(&key.ptr) + } else { + k = unsafe.Pointer(&key.scalar) + } + if val.typ == nil { + mapdelete(v.typ, v.pointer(), k) + return + } + val.mustBeExported() + val = val.assignTo("reflect.Value.SetMapIndex", tt.elem, nil) + var e unsafe.Pointer + if val.flag&flagIndir != 0 { + e = val.ptr + } else if val.typ.pointers() { + e = unsafe.Pointer(&val.ptr) + } else { + e = unsafe.Pointer(&val.scalar) } - mapassign(v.typ, v.iword(), key.iword(), val.iword(), val.typ != nil) + mapassign(v.typ, v.pointer(), k, e) } // SetUint sets v's underlying value to x. 
@@ -1520,17 +1662,17 @@ func (v Value) SetUint(x uint64) {
	default:
		panic(&ValueError{"reflect.Value.SetUint", k})
	case Uint:
-		*(*uint)(v.val) = uint(x)
+		*(*uint)(v.ptr) = uint(x)
	case Uint8:
-		*(*uint8)(v.val) = uint8(x)
+		*(*uint8)(v.ptr) = uint8(x)
	case Uint16:
-		*(*uint16)(v.val) = uint16(x)
+		*(*uint16)(v.ptr) = uint16(x)
	case Uint32:
-		*(*uint32)(v.val) = uint32(x)
+		*(*uint32)(v.ptr) = uint32(x)
	case Uint64:
-		*(*uint64)(v.val) = x
+		*(*uint64)(v.ptr) = x
	case Uintptr:
-		*(*uintptr)(v.val) = uintptr(x)
+		*(*uintptr)(v.ptr) = uintptr(x)
	}
}
@@ -1539,7 +1681,7 @@ func (v Value) SetUint(x uint64) {
func (v Value) SetPointer(x unsafe.Pointer) {
	v.mustBeAssignable()
	v.mustBe(UnsafePointer)
-	*(*unsafe.Pointer)(v.val) = x
+	*(*unsafe.Pointer)(v.ptr) = x
}

// SetString sets v's underlying value to x.
@@ -1547,7 +1689,7 @@ func (v Value) SetPointer(x unsafe.Pointer) {
func (v Value) SetString(x string) {
	v.mustBeAssignable()
	v.mustBe(String)
-	*(*string)(v.val) = x
+	*(*string)(v.ptr) = x
}

// Slice returns v[i:j].
@@ -1570,24 +1712,21 @@ func (v Value) Slice(i, j int) Value {
		tt := (*arrayType)(unsafe.Pointer(v.typ))
		cap = int(tt.len)
		typ = (*sliceType)(unsafe.Pointer(tt.slice))
-		base = v.val
+		base = v.ptr

	case Slice:
		typ = (*sliceType)(unsafe.Pointer(v.typ))
-		s := (*SliceHeader)(v.val)
+		s := (*sliceHeader)(v.ptr)
		base = unsafe.Pointer(s.Data)
		cap = s.Cap

	case String:
-		s := (*StringHeader)(v.val)
+		s := (*stringHeader)(v.ptr)
		if i < 0 || j < i || j > s.Len {
			panic("reflect.Value.Slice: string slice index out of bounds")
		}
-		var x string
-		val := (*StringHeader)(unsafe.Pointer(&x))
-		val.Data = s.Data + uintptr(i)
-		val.Len = j - i
-		return Value{v.typ, unsafe.Pointer(&x), v.flag}
+		t := stringHeader{unsafe.Pointer(uintptr(s.Data) + uintptr(i)), j - i}
+		return Value{v.typ, unsafe.Pointer(&t), 0, v.flag}
	}

	if i < 0 || j < i || j > cap {
@@ -1597,14 +1736,14 @@ func (v Value) Slice(i, j int) Value {
	// Declare slice so that gc can see the base pointer in it.
	var x []unsafe.Pointer

-	// Reinterpret as *SliceHeader to edit.
-	s := (*SliceHeader)(unsafe.Pointer(&x))
-	s.Data = uintptr(base) + uintptr(i)*typ.elem.Size()
+	// Reinterpret as *sliceHeader to edit.
+	s := (*sliceHeader)(unsafe.Pointer(&x))
+	s.Data = unsafe.Pointer(uintptr(base) + uintptr(i)*typ.elem.Size())
	s.Len = j - i
	s.Cap = cap - i

	fl := v.flag&flagRO | flagIndir | flag(Slice)<<flagKindShift
-	return Value{typ.common(), unsafe.Pointer(&x), fl}
+	return Value{typ.common(), unsafe.Pointer(&x), 0, fl}
}

// Slice3 is the 3-index form of the slice operation: it returns v[i:j:k].
@@ -1622,17 +1761,17 @@
	case Array:
		if v.flag&flagAddr == 0 {
-			panic("reflect.Value.Slice: slice of unaddressable array")
+			panic("reflect.Value.Slice3: slice of unaddressable array")
		}
		tt := (*arrayType)(unsafe.Pointer(v.typ))
		cap = int(tt.len)
		typ = (*sliceType)(unsafe.Pointer(tt.slice))
-		base = v.val
+		base = v.ptr

	case Slice:
		typ = (*sliceType)(unsafe.Pointer(v.typ))
-		s := (*SliceHeader)(v.val)
-		base = unsafe.Pointer(s.Data)
+		s := (*sliceHeader)(v.ptr)
+		base = s.Data
		cap = s.Cap
	}
@@ -1644,14 +1783,14 @@
	// can see the base pointer in it.
	var x []unsafe.Pointer

-	// Reinterpret as *SliceHeader to edit.
-	s := (*SliceHeader)(unsafe.Pointer(&x))
-	s.Data = uintptr(base) + uintptr(i)*typ.elem.Size()
+	// Reinterpret as *sliceHeader to edit.
+	s := (*sliceHeader)(unsafe.Pointer(&x))
+	s.Data = unsafe.Pointer(uintptr(base) + uintptr(i)*typ.elem.Size())
	s.Len = j - i
	s.Cap = k - i

	fl := v.flag&flagRO | flagIndir | flag(Slice)<<flagKindShift
-	return Value{typ.common(), unsafe.Pointer(&x), fl}
+	return Value{typ.common(), unsafe.Pointer(&x), 0, fl}
}

// String returns the string v's underlying value, as a string.
@@ -1663,7 +1802,7 @@ func (v Value) String() string {
	case Invalid:
		return "<invalid Value>"
	case String:
-		return *(*string)(v.val)
+		return *(*string)(v.ptr)
	}
	// If you call String on a reflect.Value of other type, it's better to
	// print something than to panic. Useful in debugging.
@@ -1672,9 +1811,9 @@
// TryRecv attempts to receive a value from the channel v but will not block.
// It panics if v's Kind is not Chan.
-// If the receive cannot finish without blocking, x is the zero Value.
-// The boolean ok is true if the value x corresponds to a send
-// on the channel, false if it is a zero value received because the channel is closed.
+// If the receive delivers a value, x is the transferred value and ok is true.
+// If the receive cannot finish without blocking, x is the zero Value and ok is false.
+// If the channel is closed, x is the zero value for the channel's element type and ok is false.
func (v Value) TryRecv() (x Value, ok bool) {
	v.mustBe(Chan)
	v.mustBeExported()
@@ -1729,11 +1868,11 @@ func (v Value) Uint() uint64 {
	k := v.kind()
	var p unsafe.Pointer
	if v.flag&flagIndir != 0 {
-		p = v.val
+		p = v.ptr
	} else {
-		// The escape analysis is good enough that &v.val
+		// The escape analysis is good enough that &v.scalar
		// does not trigger a heap allocation.
-		p = unsafe.Pointer(&v.val)
+		p = unsafe.Pointer(&v.scalar)
	}
	switch k {
	case Uint:
@@ -1756,13 +1895,14 @@
// It is for advanced clients that also import the "unsafe" package.
// It panics if v is not addressable.
func (v Value) UnsafeAddr() uintptr {
+	// TODO: deprecate
	if v.typ == nil {
		panic(&ValueError{"reflect.Value.UnsafeAddr", Invalid})
	}
	if v.flag&flagAddr == 0 {
		panic("reflect.Value.UnsafeAddr of unaddressable value")
	}
-	return uintptr(v.val)
+	return uintptr(v.ptr)
}

// StringHeader is the runtime representation of a string.
@@ -1776,6 +1916,12 @@ type StringHeader struct {
	Len int
}

+// stringHeader is a safe version of StringHeader used within this package.
+type stringHeader struct {
+	Data unsafe.Pointer
+	Len  int
+}
+
// SliceHeader is the runtime representation of a slice.
// It cannot be used safely or portably and its representation may
// change in a later release.
@@ -1788,6 +1934,13 @@ type SliceHeader struct {
	Cap int
}

+// sliceHeader is a safe version of SliceHeader used within this package.
+type sliceHeader struct {
+	Data unsafe.Pointer
+	Len  int
+	Cap  int
+}
+
func typesMustMatch(what string, t1, t2 Type) {
	if t1 != t2 {
		panic(what + ": " + t1.String() + " != " + t2.String())
	}
@@ -1876,6 +2029,8 @@ func Copy(dst, src Value) int {
	// If sk is an in-line array, cannot take its address.
	// Instead, copy element by element.
+	// TODO: memmove would be ok for this (sa = unsafe.Pointer(&v.scalar))
+	// if we teach the compiler that ptrs don't escape from memmove.
	if src.flag&flagIndir == 0 {
		for i := 0; i < n; i++ {
			dst.Index(i).Set(src.Index(i))
		}
@@ -1886,14 +2041,14 @@
	// Copy via memmove.
	var da, sa unsafe.Pointer
	if dk == Array {
-		da = dst.val
+		da = dst.ptr
	} else {
-		da = unsafe.Pointer((*SliceHeader)(dst.val).Data)
+		da = (*sliceHeader)(dst.ptr).Data
	}
	if sk == Array {
-		sa = src.val
+		sa = src.ptr
	} else {
-		sa = unsafe.Pointer((*SliceHeader)(src.val).Data)
+		sa = (*sliceHeader)(src.ptr).Data
	}
	memmove(da, sa, uintptr(n)*de.Size())
	return n
@@ -1902,17 +2057,18 @@
// A runtimeSelect is a single case passed to rselect.
// This must match ../runtime/chan.c:/runtimeSelect
type runtimeSelect struct {
-	dir uintptr // 0, SendDir, or RecvDir
-	typ *rtype  // channel type
-	ch  iword   // interface word for channel
-	val iword   // interface word for value (for SendDir)
+	dir uintptr        // 0, SendDir, or RecvDir
+	typ *rtype         // channel type
+	ch  unsafe.Pointer // channel
+	val unsafe.Pointer // ptr to data (SendDir) or ptr to receive buffer (RecvDir)
}

-// rselect runs a select. It returns the index of the chosen case,
-// and if the case was a receive, the interface word of the received
-// value and the conventional OK bool to indicate whether the receive
-// corresponds to a sent value.
-func rselect([]runtimeSelect) (chosen int, recv iword, recvOK bool)
+// rselect runs a select. It returns the index of the chosen case.
+// If the case was a receive, val is filled in with the received value.
+// The conventional OK bool indicates whether the receive corresponds
+// to a sent value.
+//go:noescape
+func rselect([]runtimeSelect) (chosen int, recvOK bool)

// A SelectDir describes the communication direction of a select case.
type SelectDir int
@@ -1992,7 +2148,7 @@ func Select(cases []SelectCase) (chosen int, recv Value, recvOK bool) {
			if ChanDir(tt.dir)&SendDir == 0 {
				panic("reflect.Select: SendDir case using recv-only channel")
			}
-			rc.ch = ch.iword()
+			rc.ch = ch.pointer()
			rc.typ = &tt.rtype
			v := c.Send
			if !v.IsValid() {
@@ -2000,7 +2156,13 @@ func Select(cases []SelectCase) (chosen int, recv Value, recvOK bool) {
			}
			v.mustBeExported()
			v = v.assignTo("reflect.Select", tt.elem, nil)
-			rc.val = v.iword()
+			if v.flag&flagIndir != 0 {
+				rc.val = v.ptr
+			} else if v.typ.pointers() {
+				rc.val = unsafe.Pointer(&v.ptr)
+			} else {
+				rc.val = unsafe.Pointer(&v.scalar)
+			}

		case SelectRecv:
			if c.Send.IsValid() {
@@ -2013,23 +2175,28 @@
			ch.mustBe(Chan)
			ch.mustBeExported()
			tt := (*chanType)(unsafe.Pointer(ch.typ))
-			rc.typ = &tt.rtype
			if ChanDir(tt.dir)&RecvDir == 0 {
				panic("reflect.Select: RecvDir case using send-only channel")
			}
-			rc.ch = ch.iword()
+			rc.ch = ch.pointer()
+			rc.typ = &tt.rtype
+			rc.val = unsafe_New(tt.elem)
		}
	}

-	chosen, word, recvOK := rselect(runcases)
+	chosen, recvOK = rselect(runcases)
	if runcases[chosen].dir == uintptr(SelectRecv) {
		tt := (*chanType)(unsafe.Pointer(runcases[chosen].typ))
-		typ := tt.elem
-		fl := flag(typ.Kind()) << flagKindShift
-		if typ.size > ptrSize {
-			fl |= flagIndir
+		t := tt.elem
+		p := runcases[chosen].val
+		fl := flag(t.Kind()) << flagKindShift
+		if t.size > ptrSize {
+			recv = Value{t, p, 0, fl | flagIndir}
+		} else if t.pointers() {
+			recv = Value{t, *(*unsafe.Pointer)(p), 0, fl}
+		} else {
+			recv = Value{t, nil, loadScalar(p, t.size), fl}
		}
-		recv = Value{typ, unsafe.Pointer(word), fl}
	}
	return chosen, recv, recvOK
}
@@ -2058,16 +2225,8 @@ func MakeSlice(typ Type, len, cap int) Value {
		panic("reflect.MakeSlice: len > cap")
	}

-	// Declare slice so that gc can see the base pointer in it.
-	var x []unsafe.Pointer
-
-	// Reinterpret as *SliceHeader to edit.
-	s := (*SliceHeader)(unsafe.Pointer(&x))
-	s.Data = uintptr(unsafe_NewArray(typ.Elem().(*rtype), cap))
-	s.Len = len
-	s.Cap = cap
-
-	return Value{typ.common(), unsafe.Pointer(&x), flagIndir | flag(Slice)<<flagKindShift}
+	s := sliceHeader{unsafe_NewArray(typ.Elem().(*rtype), cap), len, cap}
+	return Value{typ.common(), unsafe.Pointer(&s), 0, flagIndir | flag(Slice)<<flagKindShift}
}

// MakeChan creates a new channel with the specified type and buffer size.
@@ -2082,7 +2241,7 @@ func MakeChan(typ Type, buffer int) Value {
		panic("reflect.MakeChan: unidirectional channel type")
	}
	ch := makechan(typ.(*rtype), uint64(buffer))
-	return Value{typ.common(), unsafe.Pointer(ch), flag(Chan) << flagKindShift}
+	return Value{typ.common(), ch, 0, flag(Chan) << flagKindShift}
}

// MakeMap creates a new map of the specified type.
@@ -2091,7 +2250,7 @@ func MakeMap(typ Type) Value {
		panic("reflect.MakeMap of non-map type")
	}
	m := makemap(typ.(*rtype))
-	return Value{typ.common(), unsafe.Pointer(m), flag(Map) << flagKindShift}
+	return Value{typ.common(), m, 0, flag(Map) << flagKindShift}
}

// Indirect returns the value that v points to.
@@ -2112,21 +2271,13 @@ func ValueOf(i interface{}) Value {
	}

	// TODO(rsc): Eliminate this terrible hack.
-	// In the call to packValue, eface.typ doesn't escape,
-	// and eface.word is an integer. So it looks like
-	// i (= eface) doesn't escape. But really it does,
-	// because eface.word is actually a pointer.
+	// In the call to unpackEface, i.typ doesn't escape,
+	// and i.word is an integer. So it looks like
+	// i doesn't escape. But really it does,
+	// because i.word is actually a pointer.
	escapes(i)

-	// For an interface value with the noAddr bit set,
-	// the representation is identical to an empty interface.
-	eface := *(*emptyInterface)(unsafe.Pointer(&i))
-	typ := eface.typ
-	fl := flag(typ.Kind()) << flagKindShift
-	if typ.size > ptrSize {
-		fl |= flagIndir
-	}
-	return Value{typ, unsafe.Pointer(eface.word), fl}
+	return unpackEface(i)
}

// Zero returns a Value representing the zero value for the specified type.
@@ -2141,27 +2292,27 @@ func Zero(typ Type) Value {
	t := typ.common()
	fl := flag(t.Kind()) << flagKindShift
	if t.size <= ptrSize {
-		return Value{t, nil, fl}
+		return Value{t, nil, 0, fl}
	}
-	return Value{t, unsafe_New(typ.(*rtype)), fl | flagIndir}
+	return Value{t, unsafe_New(typ.(*rtype)), 0, fl | flagIndir}
}

// New returns a Value representing a pointer to a new zero value
-// for the specified type. That is, the returned Value's Type is PtrTo(t).
+// for the specified type. That is, the returned Value's Type is PtrTo(typ).
func New(typ Type) Value {
	if typ == nil {
		panic("reflect: New(nil)")
	}
	ptr := unsafe_New(typ.(*rtype))
	fl := flag(Ptr) << flagKindShift
-	return Value{typ.common().ptrTo(), ptr, fl}
+	return Value{typ.common().ptrTo(), ptr, 0, fl}
}

// NewAt returns a Value representing a pointer to a value of the
// specified type, using p as that pointer.
func NewAt(typ Type, p unsafe.Pointer) Value {
	fl := flag(Ptr) << flagKindShift
-	return Value{typ.common().ptrTo(), p, fl}
+	return Value{typ.common().ptrTo(), p, 0, fl}
}

// assignTo returns a value v that can be assigned directly to typ.
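Editorial note, not part of the patch: the hunks above rewire the exported constructors and reflect.Select (runtimeSelect now carries a per-case receive buffer, and MakeSlice/MakeChan/MakeMap populate the new ptr/scalar fields). A small sketch of the exported API these hunks back, using only calls shown or documented here plus reflect.SelectDefault, which is assumed only to keep the example non-blocking:

package main

import (
	"fmt"
	"reflect"
)

func main() {
	// MakeChan requires a bidirectional channel type.
	ch := reflect.MakeChan(reflect.TypeOf(make(chan int)), 1)
	ch.Send(reflect.ValueOf(42))

	// reflect.Select drives the rselect path changed above;
	// the default case keeps this sketch from blocking.
	cases := []reflect.SelectCase{
		{Dir: reflect.SelectRecv, Chan: ch},
		{Dir: reflect.SelectDefault},
	}
	chosen, recv, ok := reflect.Select(cases)
	fmt.Println(chosen, recv.Interface(), ok) // 0 42 true

	// MakeSlice now builds the slice header directly from the backing array.
	s := reflect.MakeSlice(reflect.TypeOf([]int(nil)), 2, 4)
	fmt.Println(s.Len(), s.Cap()) // 2 4
}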
@@ -2179,7 +2330,7 @@ func (v Value) assignTo(context string, dst *rtype, target *interface{}) Value {
		v.typ = dst
		fl := v.flag & (flagRO | flagAddr | flagIndir)
		fl |= flag(dst.Kind()) << flagKindShift
-		return Value{dst, v.val, fl}
+		return Value{dst, v.ptr, v.scalar, fl}

	case implements(dst, v.typ):
		if target == nil {
@@ -2191,7 +2342,7 @@ func (v Value) assignTo(context string, dst *rtype, target *interface{}) Value {
		} else {
			ifaceE2I(dst, x, unsafe.Pointer(target))
		}
-		return Value{dst, unsafe.Pointer(target), flagIndir | flag(Interface)<<flagKindShift}
+		return Value{dst, unsafe.Pointer(target), 0, flagIndir | flag(Interface)<<flagKindShift}
	}

	// Failed.
@@ -2303,20 +2454,20 @@ func makeInt(f flag, bits uint64, t Type) Value {
		// Assume ptrSize >= 4, so this must be uint64.
		ptr := unsafe_New(typ)
		*(*uint64)(unsafe.Pointer(ptr)) = bits
-		return Value{typ, ptr, f | flagIndir | flag(typ.Kind())<<flagKindShift}
+		return Value{typ, ptr, 0, f | flagIndir | flag(typ.Kind())<<flagKindShift}
	}
-	var w iword
+	var s uintptr
	switch typ.size {
	case 1:
-		*(*uint8)(unsafe.Pointer(&w)) = uint8(bits)
+		*(*uint8)(unsafe.Pointer(&s)) = uint8(bits)
	case 2:
-		*(*uint16)(unsafe.Pointer(&w)) = uint16(bits)
+		*(*uint16)(unsafe.Pointer(&s)) = uint16(bits)
	case 4:
-		*(*uint32)(unsafe.Pointer(&w)) = uint32(bits)
+		*(*uint32)(unsafe.Pointer(&s)) = uint32(bits)
	case 8:
-		*(*uint64)(unsafe.Pointer(&w)) = uint64(bits)
+		*(*uint64)(unsafe.Pointer(&s)) = uint64(bits)
	}
-	return Value{typ, unsafe.Pointer(w), f | flag(typ.Kind())<<flagKindShift}
+	return Value{typ, nil, s, f | flag(typ.Kind())<<flagKindShift}
}

// makeFloat returns a Value of type t equal to v (possibly truncated to float32),
@@ -2327,17 +2478,17 @@ func makeFloat(f flag, v float64, t Type) Value {
		// Assume ptrSize >= 4, so this must be float64.
		ptr := unsafe_New(typ)
		*(*float64)(unsafe.Pointer(ptr)) = v
-		return Value{typ, ptr, f | flagIndir | flag(typ.Kind())<<flagKindShift}
+		return Value{typ, ptr, 0, f | flagIndir | flag(typ.Kind())<<flagKindShift}
	}
-	var w iword
+	var s uintptr
	switch typ.size {
	case 4:
-		*(*float32)(unsafe.Pointer(&w)) = float32(v)
+		*(*float32)(unsafe.Pointer(&s)) = float32(v)
	case 8:
-		*(*float64)(unsafe.Pointer(&w)) = v
+		*(*float64)(unsafe.Pointer(&s)) = v
	}
-	return Value{typ, unsafe.Pointer(w), f | flag(typ.Kind())<<flagKindShift}
+	return Value{typ, nil, s, f | flag(typ.Kind())<<flagKindShift}
}

// makeComplex returns a Value of type t equal to v (possibly truncated to complex64),
@@ -2352,13 +2503,13 @@ func makeComplex(f flag, v complex128, t Type) Value {
		case 16:
			*(*complex128)(unsafe.Pointer(ptr)) = v
		}
-		return Value{typ, ptr, f | flagIndir | flag(typ.Kind())<<flagKindShift}
+		return Value{typ, ptr, 0, f | flagIndir | flag(typ.Kind())<<flagKindShift}
	}

	// Assume ptrSize <= 8 so this must be complex64.
-	var w iword
-	*(*complex64)(unsafe.Pointer(&w)) = complex64(v)
-	return Value{typ, unsafe.Pointer(w), f | flag(typ.Kind())<<flagKindShift}
+	var s uintptr
+	*(*complex64)(unsafe.Pointer(&s)) = complex64(v)
+	return Value{typ, nil, s, f | flag(typ.Kind())<<flagKindShift}
}

func makeString(f flag, v string, t Type) Value {
@@ -2461,15 +2612,15 @@ func cvtStringRunes(v Value, t Type) Value {
func cvtDirect(v Value, typ Type) Value {
	f := v.flag
	t := typ.common()
-	val := v.val
+	ptr := v.ptr
	if f&flagAddr != 0 {
		// indirect, mutable word - make a copy
-		ptr := unsafe_New(t)
-		memmove(ptr, val, t.size)
-		val = ptr
+		c := unsafe_New(t)
+		memmove(c, ptr, t.size)
+		ptr = c
		f &^= flagAddr
	}
-	return Value{t, val, v.flag&flagRO | f}
+	return Value{t, ptr, v.scalar, v.flag&flagRO | f} // v.flag&flagRO|f == f?
}

// convertOp: concrete -> interface
@@ -2481,7 +2632,7 @@ func cvtT2I(v Value, typ Type) Value {
	} else {
		ifaceE2I(typ.(*rtype), x, unsafe.Pointer(target))
	}
-	return Value{typ.common(), unsafe.Pointer(target), v.flag&flagRO | flagIndir | flag(Interface)<<flagKindShift}
+	return Value{typ.common(), unsafe.Pointer(target), 0, v.flag&flagRO | flagIndir | flag(Interface)<<flagKindShift}
}

// convertOp: interface -> interface
@@ -2495,22 +2646,27 @@ func cvtI2I(v Value, typ Type) Value {
}

// implemented in ../pkg/runtime
-func chancap(ch iword) int
-func chanclose(ch iword)
-func chanlen(ch iword) int
-func chanrecv(t *rtype, ch iword, nb bool) (val iword, selected, received bool)
-func chansend(t *rtype, ch iword, val iword, nb bool) bool
-
-func makechan(typ *rtype, size uint64) (ch iword)
-func makemap(t *rtype) (m iword)
-func mapaccess(t *rtype, m iword, key iword) (val iword, ok bool)
-func mapassign(t *rtype, m iword, key, val iword, ok bool)
-func mapiterinit(t *rtype, m iword) *byte
-func mapiterkey(it *byte) (key iword, ok bool)
-func mapiternext(it *byte)
-func maplen(m iword) int
-
-func call(fn, arg unsafe.Pointer, n uint32)
+func chancap(ch unsafe.Pointer) int
+func chanclose(ch unsafe.Pointer)
+func chanlen(ch unsafe.Pointer) int
+
+//go:noescape
+func chanrecv(t *rtype, ch unsafe.Pointer, nb bool, val unsafe.Pointer) (selected, received bool)
+
+//go:noescape
+func chansend(t *rtype, ch unsafe.Pointer, val unsafe.Pointer, nb bool) bool
+
+func makechan(typ *rtype, size uint64) (ch unsafe.Pointer)
+func makemap(t *rtype) (m unsafe.Pointer)
+func mapaccess(t *rtype, m unsafe.Pointer, key unsafe.Pointer) (val unsafe.Pointer)
+func mapassign(t *rtype, m unsafe.Pointer, key, val unsafe.Pointer)
+func mapdelete(t *rtype, m unsafe.Pointer, key unsafe.Pointer)
+func mapiterinit(t *rtype, m unsafe.Pointer) unsafe.Pointer
+func mapiterkey(it unsafe.Pointer) (key unsafe.Pointer)
+func mapiternext(it unsafe.Pointer)
+func maplen(m unsafe.Pointer) int
+
+func call(fn, arg unsafe.Pointer, n uint32, retoffset uint32)
func ifaceE2I(t *rtype, src interface{}, dst unsafe.Pointer)

// Dummy annotation marking that the value x escapes,