Commit 7fdec621 authored by Russ Cox

build: enable framepointer mode by default

This has a minor performance cost, but far less than is being gained by SSA.
As an experiment, enable it during the Go 1.7 beta.
Having frame pointers on by default makes Linux's perf, Intel VTune,
and other profilers much more useful, because it lets them gather a
stack trace efficiently on profiling events.
(It doesn't help us that much, since when we walk the stack we usually
need to look up PC-specific information as well.)
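For reference, the walk these profilers do is just a chase down the saved-BP chain:
with frame pointers enabled on amd64, the word at BP holds the caller's saved BP and
the word one slot above it holds the return address. Below is a minimal sketch of that
walk over a simulated stack; all addresses and PC values are invented for illustration,
and the map stands in for reading the interrupted thread's memory.

    // Sketch of the frame-pointer walk a sampling profiler performs.
    // The "stack" here is simulated: stack[bp] holds the caller's saved BP
    // and stack[bp+1] holds the return PC, mirroring the amd64 frame layout
    // once BP is maintained as a frame pointer. Values are illustrative only.
    package main

    import "fmt"

    func main() {
        stack := map[uint64]uint64{
            0x20: 0x30, 0x21: 0x3003, // innermost frame
            0x30: 0x40, 0x31: 0x2002, // its caller
            0x40: 0x00, 0x41: 0x1001, // outermost frame; saved BP of 0 ends the walk
        }

        var pcs []uint64
        for bp := uint64(0x20); bp != 0; bp = stack[bp] {
            pcs = append(pcs, stack[bp+1]) // return address sits just above the saved BP
        }
        fmt.Printf("sampled stack: %#x\n", pcs) // [0x3003 0x2002 0x1001]
    }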

Fixes #15840.

Change-Id: I4efd38412a0de4a9c87b1b6e5d11c301e63f1a2a
Reviewed-on: https://go-review.googlesource.com/23451
Run-TryBot: Russ Cox <rsc@golang.org>
Reviewed-by: Austin Clements <austin@google.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
parent 2168f2a6
@@ -25,18 +25,16 @@ func betypeinit() {
         cmpptr = x86.ACMPL
     }
 
-    if gc.Ctxt.Flag_dynlink {
-        gc.Thearch.ReservedRegs = append(gc.Thearch.ReservedRegs, x86.REG_R15)
+    if gc.Ctxt.Flag_dynlink || obj.Getgoos() == "nacl" {
+        resvd = append(resvd, x86.REG_R15)
     }
-}
-
-func Main() {
-    if obj.Getgoos() == "nacl" {
-        resvd = append(resvd, x86.REG_BP, x86.REG_R15)
-    } else if obj.Framepointer_enabled != 0 {
+    if gc.Ctxt.Framepointer_enabled || obj.Getgoos() == "nacl" {
         resvd = append(resvd, x86.REG_BP)
     }
+    gc.Thearch.ReservedRegs = resvd
+}
 
+func Main() {
     gc.Thearch.LinkArch = &x86.Linkamd64
     if obj.Getgoarch() == "amd64p32" {
         gc.Thearch.LinkArch = &x86.Linkamd64p32
@@ -51,7 +49,6 @@ func Main() {
     gc.Thearch.FREGMIN = x86.REG_X0
     gc.Thearch.FREGMAX = x86.REG_X15
     gc.Thearch.MAXWIDTH = 1 << 50
-    gc.Thearch.ReservedRegs = resvd
 
     gc.Thearch.AddIndex = addindex
     gc.Thearch.Betypeinit = betypeinit
......
@@ -32,7 +32,6 @@ package amd64
 import (
     "cmd/compile/internal/gc"
-    "cmd/internal/obj"
     "cmd/internal/obj/x86"
 )
@@ -121,7 +120,7 @@ func BtoR(b uint64) int {
     b &= 0xffff
     if gc.Nacl {
         b &^= (1<<(x86.REG_BP-x86.REG_AX) | 1<<(x86.REG_R15-x86.REG_AX))
-    } else if obj.Framepointer_enabled != 0 {
+    } else if gc.Ctxt.Framepointer_enabled {
         // BP is part of the calling convention if framepointer_enabled.
         b &^= (1 << (x86.REG_BP - x86.REG_AX))
     }
......
@@ -106,7 +106,6 @@
 package ssa
 
 import (
-    "cmd/internal/obj"
     "fmt"
     "unsafe"
 )
@@ -456,7 +455,7 @@ func (s *regAllocState) init(f *Func) {
     s.allocatable = regMask(1)<<s.numRegs - 1
     s.allocatable &^= 1 << s.SPReg
     s.allocatable &^= 1 << s.SBReg
-    if obj.Framepointer_enabled != 0 {
+    if s.f.Config.ctxt.Framepointer_enabled {
         s.allocatable &^= 1 << 5 // BP
     }
     if s.f.Config.ctxt.Flag_dynlink {
......
@@ -13,7 +13,7 @@ import (
 // go-specific code shared across loaders (5l, 6l, 8l).
 
 var (
-    Framepointer_enabled int
+    framepointer_enabled int
     Fieldtrack_enabled   int
 )
@@ -26,14 +26,21 @@ var exper = []struct {
     val  *int
 }{
     {"fieldtrack", &Fieldtrack_enabled},
-    {"framepointer", &Framepointer_enabled},
+    {"framepointer", &framepointer_enabled},
 }
 
 func addexp(s string) {
+    // Could do general integer parsing here, but the runtime copy doesn't yet.
+    v := 1
+    name := s
+    if len(name) > 2 && name[:2] == "no" {
+        v = 0
+        name = name[2:]
+    }
     for i := 0; i < len(exper); i++ {
-        if exper[i].name == s {
+        if exper[i].name == name {
             if exper[i].val != nil {
-                *exper[i].val = 1
+                *exper[i].val = v
             }
             return
         }
@@ -44,6 +51,7 @@ func addexp(s string) {
 }
 
 func init() {
+    framepointer_enabled = 1 // default
     for _, f := range strings.Split(goexperiment, ",") {
         if f != "" {
             addexp(f)
@@ -51,6 +59,10 @@ func init() {
     }
 }
 
+func Framepointer_enabled(goos, goarch string) bool {
+    return framepointer_enabled != 0 && goarch == "amd64" && goos != "nacl"
+}
+
 func Nopout(p *Prog) {
     p.As = ANOP
     p.Scond = 0
......
@@ -664,6 +664,8 @@ type Link struct {
     Etextp *LSym
     Errors int
 
+    Framepointer_enabled bool
+
     // state for writing objects
     Text []*LSym
     Data []*LSym
......
@@ -106,6 +106,7 @@ func Linknew(arch *LinkArch) *Link {
     }
 
     ctxt.Flag_optimize = true
+    ctxt.Framepointer_enabled = Framepointer_enabled(Getgoos(), arch.Name)
     return ctxt
 }
......
@@ -3765,7 +3765,7 @@ func doasm(ctxt *obj.Link, p *obj.Prog) {
                     ctxt.Diag("directly calling duff when dynamically linking Go")
                 }
 
-                if obj.Framepointer_enabled != 0 && yt.zcase == Zcallduff && p.Mode == 64 {
+                if ctxt.Framepointer_enabled && yt.zcase == Zcallduff && p.Mode == 64 {
                     // Maintain BP around call, since duffcopy/duffzero can't do it
                     // (the call jumps into the middle of the function).
                     // This makes it possible to see call sites for duffcopy/duffzero in
@@ -3784,7 +3784,7 @@ func doasm(ctxt *obj.Link, p *obj.Prog) {
                 r.Siz = 4
                 ctxt.AsmBuf.PutInt32(0)
 
-                if obj.Framepointer_enabled != 0 && yt.zcase == Zcallduff && p.Mode == 64 {
+                if ctxt.Framepointer_enabled && yt.zcase == Zcallduff && p.Mode == 64 {
                     // Pop BP pushed above.
                     // MOVQ 0(BP), BP
                     ctxt.AsmBuf.Put(bpduff2)
......
@@ -610,7 +610,7 @@ func preprocess(ctxt *obj.Link, cursym *obj.LSym) {
     }
 
     var bpsize int
-    if p.Mode == 64 && obj.Framepointer_enabled != 0 && autoffset > 0 && p.From3.Offset&obj.NOFRAME == 0 {
+    if p.Mode == 64 && ctxt.Framepointer_enabled && autoffset > 0 && p.From3.Offset&obj.NOFRAME == 0 {
         // Make room for to save a base pointer. If autoffset == 0,
         // this might do something special like a tail jump to
         // another function, so in that case we omit this.
......
@@ -1558,7 +1558,7 @@ func writelines(prev *LSym) *LSym {
             if !haslinkregister() {
                 offs -= int64(SysArch.PtrSize)
             }
-            if obj.Framepointer_enabled != 0 {
+            if obj.Framepointer_enabled(obj.Getgoos(), obj.Getgoarch()) {
                 // The frame pointer is saved
                 // between the CFA and the
                 // autos.
......
@@ -639,11 +639,17 @@ func loadlib() {
         // recording the value of GOARM.
         if SysArch.Family == sys.ARM {
             s := Linklookup(Ctxt, "runtime.goarm", 0)
             s.Type = obj.SRODATA
             s.Size = 0
             Adduint8(Ctxt, s, uint8(Ctxt.Goarm))
         }
+        if obj.Framepointer_enabled(obj.Getgoos(), obj.Getgoarch()) {
+            s := Linklookup(Ctxt, "runtime.framepointer_enabled", 0)
+            s.Type = obj.SRODATA
+            s.Size = 0
+            Adduint8(Ctxt, s, 1)
+        }
     } else {
         // If OTOH the module does not contain the runtime package,
         // create a local symbol for the moduledata.
......
@@ -434,9 +434,6 @@ func schedinit() {
 
     sched.maxmcount = 10000
 
-    // Cache the framepointer experiment. This affects stack unwinding.
-    framepointer_enabled = haveexperiment("framepointer")
-
     tracebackinit()
     moduledataverify()
     stackinit()
@@ -4163,6 +4160,9 @@ func setMaxThreads(in int) (out int) {
 }
 
 func haveexperiment(name string) bool {
+    if name == "framepointer" {
+        return framepointer_enabled // set by linker
+    }
     x := sys.Goexperiment
     for x != "" {
         xname := ""
@@ -4175,6 +4175,9 @@ func haveexperiment(name string) bool {
         if xname == name {
             return true
         }
+        if len(xname) > 2 && xname[:2] == "no" && xname[2:] == name {
+            return false
+        }
     }
     return false
 }
......
@@ -725,7 +725,8 @@
     support_avx  bool
     support_avx2 bool
 
-    goarm uint8 // set by cmd/link on arm systems
+    goarm                uint8 // set by cmd/link on arm systems
+    framepointer_enabled bool  // set by cmd/link
 )
 
 // Set by the linker so the runtime can determine the buildmode.
......
@@ -155,9 +155,6 @@ var stackLarge struct {
     free [_MHeapMap_Bits]mSpanList // free lists by log_2(s.npages)
 }
 
-// Cached value of haveexperiment("framepointer")
-var framepointer_enabled bool
-
 func stackinit() {
     if _StackCacheSize&_PageMask != 0 {
         throw("cache size must be a multiple of page size")
......