Commit cf69867a authored by Josh Bleecher Snyder

runtime: speed up key copying in specialized evacuate routines

Similar to CL 59110.

Change-Id: Ia2858541c86a44b105eacbca9a46b1044632c5ca
Reviewed-on: https://go-review.googlesource.com/59134
Run-TryBot: Josh Bleecher Snyder <josharian@gmail.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Keith Randall <khr@golang.org>
parent 0480bfe5
...@@ -838,7 +838,14 @@ func evacuate_fast32(t *maptype, h *hmap, oldbucket uintptr) { ...@@ -838,7 +838,14 @@ func evacuate_fast32(t *maptype, h *hmap, oldbucket uintptr) {
dst.v = add(dst.k, bucketCnt*4) dst.v = add(dst.k, bucketCnt*4)
} }
dst.b.tophash[dst.i&(bucketCnt-1)] = top // mask dst.i as an optimization, to avoid a bounds check dst.b.tophash[dst.i&(bucketCnt-1)] = top // mask dst.i as an optimization, to avoid a bounds check
typedmemmove(t.key, dst.k, k) // copy value
// Copy key.
if sys.PtrSize == 4 && t.key.kind&kindNoPointers == 0 && writeBarrier.enabled {
writebarrierptr((*uintptr)(dst.k), *(*uintptr)(k))
} else {
*(*uint32)(dst.k) = *(*uint32)(k)
}
if t.indirectvalue { if t.indirectvalue {
*(*unsafe.Pointer)(dst.v) = *(*unsafe.Pointer)(v) *(*unsafe.Pointer)(dst.v) = *(*unsafe.Pointer)(v)
} else { } else {
...@@ -935,7 +942,20 @@ func evacuate_fast64(t *maptype, h *hmap, oldbucket uintptr) { ...@@ -935,7 +942,20 @@ func evacuate_fast64(t *maptype, h *hmap, oldbucket uintptr) {
dst.v = add(dst.k, bucketCnt*8) dst.v = add(dst.k, bucketCnt*8)
} }
dst.b.tophash[dst.i&(bucketCnt-1)] = top // mask dst.i as an optimization, to avoid a bounds check dst.b.tophash[dst.i&(bucketCnt-1)] = top // mask dst.i as an optimization, to avoid a bounds check
typedmemmove(t.key, dst.k, k) // copy value
// Copy key.
if t.key.kind&kindNoPointers == 0 && writeBarrier.enabled {
if sys.PtrSize == 8 {
writebarrierptr((*uintptr)(dst.k), *(*uintptr)(k))
} else {
// There are three ways to squeeze at least one 32 bit pointer into 64 bits.
// Give up and call typedmemmove.
typedmemmove(t.key, dst.k, k)
}
} else {
*(*uint64)(dst.k) = *(*uint64)(k)
}
if t.indirectvalue { if t.indirectvalue {
*(*unsafe.Pointer)(dst.v) = *(*unsafe.Pointer)(v) *(*unsafe.Pointer)(dst.v) = *(*unsafe.Pointer)(v)
} else { } else {
...@@ -1032,7 +1052,10 @@ func evacuate_faststr(t *maptype, h *hmap, oldbucket uintptr) { ...@@ -1032,7 +1052,10 @@ func evacuate_faststr(t *maptype, h *hmap, oldbucket uintptr) {
dst.v = add(dst.k, bucketCnt*2*sys.PtrSize) dst.v = add(dst.k, bucketCnt*2*sys.PtrSize)
} }
dst.b.tophash[dst.i&(bucketCnt-1)] = top // mask dst.i as an optimization, to avoid a bounds check dst.b.tophash[dst.i&(bucketCnt-1)] = top // mask dst.i as an optimization, to avoid a bounds check
typedmemmove(t.key, dst.k, k) // copy value
// Copy key.
*(*string)(dst.k) = *(*string)(k)
if t.indirectvalue { if t.indirectvalue {
*(*unsafe.Pointer)(dst.v) = *(*unsafe.Pointer)(v) *(*unsafe.Pointer)(dst.v) = *(*unsafe.Pointer)(v)
} else { } else {
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment