Commit 2075a932 authored by Heschi Kreinick

cmd/compile: reimplement location list generation

Completely redesign and reimplement location list generation to be more
efficient, and hopefully not too hard to understand.

RegKills are gone. Instead of using the regalloc's liveness
calculations, redo them using the Ops' clobber information. Besides
saving a lot of Values, this avoids adding RegKills to blocks that would
be empty otherwise, which was messing up optimizations. This does mean
that it's much harder to tell whether the generation process is buggy
(there's nothing to cross-check it with), and there may be disagreements
with GC liveness. But the performance gain is significant, and it's nice
not to be messing with earlier compiler phases.
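
For intuition, the clobber-driven bookkeeping amounts to: when a value
executes, any register in its Op's clobber mask stops holding a known
variable. A standalone, simplified sketch of just that step, using
made-up types in place of the ssa package's regMask and per-register
slot lists:

    package main

    import "fmt"

    type regMask uint64 // bit r set => the op clobbers register r

    // killClobbered drops every variable recorded in a register that the
    // executing op clobbers. regVars[r] lists the variables currently
    // believed to live in register r.
    func killClobbered(regVars [][]string, clobbers regMask) {
        for r := 0; clobbers != 0; r, clobbers = r+1, clobbers>>1 {
            if clobbers&1 != 0 {
                regVars[r] = regVars[r][:0] // register r no longer holds a known value
            }
        }
    }

    func main() {
        regVars := make([][]string, 4)
        regVars[0] = []string{"x"} // pretend register 0 holds x
        regVars[2] = []string{"y"} // pretend register 2 holds y

        // A call-like op clobbering registers 0 and 1 kills x but not y.
        killClobbered(regVars, regMask(0x3))
        fmt.Println(regVars) // [[] [] [y] []]
    }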

The intermediate representations are gone. Instead of producing
ssa.BlockDebugs, then dwarf.LocationLists, and then finally real
location lists, go directly from the SSA to a (mostly) real location
list. Because the SSA analysis happens before assembly, it stores
encoded block/value IDs where PCs would normally go. It would be easier
to do the SSA analysis after assembly, but I didn't want to retain the
SSA just for that.
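
As a toy illustration of the encoding trick (not the compiler's actual
data structures): entries are first written with (block, value)
coordinates standing in for addresses, and a callback supplied after
assembly -- like the GetPC hook installed on FuncDebug in the diff
below -- rewrites them into real PCs:

    package main

    import "fmt"

    // A pending entry's endpoints are SSA coordinates, not PCs yet.
    type pendingEntry struct {
        startBlock, startValue int
        endBlock, endValue     int
    }

    // resolve rewrites SSA coordinates into PCs using a lookup that only
    // exists once assembly has laid out the function.
    func resolve(entries []pendingEntry, getPC func(block, value int) int64) [][2]int64 {
        out := make([][2]int64, len(entries))
        for i, e := range entries {
            out[i] = [2]int64{getPC(e.startBlock, e.startValue), getPC(e.endBlock, e.endValue)}
        }
        return out
    }

    func main() {
        // Pretend assembly put the start of block 1 at PC 0x10 and the
        // point just after value 7 at PC 0x24; value 0 means "block start".
        pcs := map[[2]int]int64{{1, 0}: 0x10, {1, 7}: 0x24}
        entries := []pendingEntry{{startBlock: 1, startValue: 0, endBlock: 1, endValue: 7}}
        fmt.Println(resolve(entries, func(b, v int) int64 { return pcs[[2]int{b, v}] })) // [[16 36]]
    }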

Generation proceeds in two phases: first, it traverses the function in
CFG order, recording each block's state at its beginning and end. End
states are used to produce the start states of the successor blocks. In
the second phase, it traverses in program text order and produces the
location lists. The processing in the second phase is redundant, but
much cheaper than storing the intermediate representation. It might be
possible to combine the two phases somewhat to take advantage of cases
where the CFG matches the block layout, but I haven't tried.
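
A hypothetical skeleton of the two passes, with the per-variable state
left abstract (the real code also has to handle loop back edges and
predecessor ordering, which this sketch ignores):

    package main

    type state struct{} // stand-in for "where every variable lives right now"

    type block struct {
        preds  []*block
        values []int // stand-ins for *ssa.Value
    }

    // buildDebug sketches the two traversals: the first keeps only each
    // block's entry/exit state; the second redoes the per-value walk in
    // final layout order and emits list entries as the state changes.
    func buildDebug(cfgOrder, textOrder []*block,
        merge func(predExits []state) state,
        step func(s state, v int) state,
        emit func(v int, s state)) {

        entry := map[*block]state{}
        exit := map[*block]state{}

        // Phase 1: CFG order, record boundary states only.
        for _, b := range cfgOrder {
            var predExits []state
            for _, p := range b.preds {
                predExits = append(predExits, exit[p])
            }
            s := merge(predExits)
            entry[b] = s
            for _, v := range b.values {
                s = step(s, v)
            }
            exit[b] = s
        }

        // Phase 2: program-text order, redo the walk and emit entries.
        for _, b := range textOrder {
            s := entry[b]
            for _, v := range b.values {
                s = step(s, v)
                emit(v, s)
            }
        }
    }

    func main() {
        b := &block{values: []int{1, 2}}
        buildDebug([]*block{b}, []*block{b},
            func([]state) state { return state{} },
            func(s state, _ int) state { return s },
            func(int, state) {})
    }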

Location lists are finalized by adding a base address selection entry,
translating each encoded block/value ID to a real PC, and adding the
terminating zero entry. This probably won't work on OSX, where dsymutil
will choke on the base address selection. I tried emitting CU-relative
relocations for each address, and it was *very* bad for performance --
storing all the relocations used more memory than was needed for the
actual location list bytes. I think I'm going to end up synthesizing the
relocations in the linker only on OSX, but TBD.
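
For reference, the finalized list uses the classic DWARF2-4 .debug_loc
encoding: a base-address-selection entry (an all-ones address followed
by the function's start address), per-range begin/end offsets relative
to that base with a 2-byte expression length, and a 0,0 terminator. A
simplified byte-level sketch for a little-endian 64-bit target (the
compiler writes through the object file's symbol APIs rather than into
a plain byte slice):

    package main

    import (
        "encoding/binary"
        "fmt"
    )

    const dwOpReg0 = 0x50 // DW_OP_reg0

    // appendLoclist encodes one variable's ranges as a DWARF v2-v4
    // location list: base address selection, one entry per range,
    // then the terminating pair of zeros.
    func appendLoclist(buf []byte, funcStart uint64, ranges [][2]uint64, exprs [][]byte) []byte {
        le := binary.LittleEndian
        put := func(v uint64) { buf = le.AppendUint64(buf, v) }

        put(^uint64(0)) // base address selection: largest address...
        put(funcStart)  // ...followed by the new base
        for i, r := range ranges {
            put(r[0]) // begin offset, relative to the base
            put(r[1]) // end offset, relative to the base
            buf = le.AppendUint16(buf, uint16(len(exprs[i])))
            buf = append(buf, exprs[i]...) // location expression, e.g. DW_OP_reg0
        }
        put(0) // end-of-list entry
        put(0)
        return buf
    }

    func main() {
        list := appendLoclist(nil, 0x401000, [][2]uint64{{0, 0x10}}, [][]byte{{dwOpReg0}})
        fmt.Printf("% x\n", list)
    }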

TestNexting needs updating: with more optimizations working, the
debugger doesn't stop on the continue (line 88) any more, and the test's
duplicate suppression kicks in. Also, dx and dy live a little longer
now, but they have the correct values.

Change-Id: Ie772dfe23a4e389ca573624fac4d05401ae32307
Reviewed-on: https://go-review.googlesource.com/89356
Run-TryBot: Heschi Kreinick <heschi@google.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: David Chase <drchase@google.com>
parent 7d7af610
@@ -572,13 +572,12 @@ var knownFormats = map[string]string{
     "*cmd/compile/internal/ssa.Block %v": "",
     "*cmd/compile/internal/ssa.Func %s": "",
     "*cmd/compile/internal/ssa.Func %v": "",
-    "*cmd/compile/internal/ssa.LocalSlot %+v": "",
     "*cmd/compile/internal/ssa.LocalSlot %v": "",
     "*cmd/compile/internal/ssa.Register %s": "",
+    "*cmd/compile/internal/ssa.Register %v": "",
     "*cmd/compile/internal/ssa.SparseTreeNode %v": "",
     "*cmd/compile/internal/ssa.Value %s": "",
     "*cmd/compile/internal/ssa.Value %v": "",
-    "*cmd/compile/internal/ssa.VarLoc %v": "",
     "*cmd/compile/internal/ssa.sparseTreeMapEntry %v": "",
     "*cmd/compile/internal/types.Field %p": "",
     "*cmd/compile/internal/types.Field %v": "",
@@ -597,7 +596,6 @@ var knownFormats = map[string]string{
     "*cmd/compile/internal/types.Type %p": "",
     "*cmd/compile/internal/types.Type %s": "",
     "*cmd/compile/internal/types.Type %v": "",
-    "*cmd/internal/dwarf.Location %#v": "",
     "*cmd/internal/obj.Addr %v": "",
     "*cmd/internal/obj.LSym %v": "",
     "*math/big.Int %#x": "",
@@ -605,13 +603,12 @@ var knownFormats = map[string]string{
     "[16]byte %x": "",
     "[]*cmd/compile/internal/gc.Node %v": "",
     "[]*cmd/compile/internal/gc.Sig %#v": "",
-    "[]*cmd/compile/internal/ssa.Block %v": "",
     "[]*cmd/compile/internal/ssa.Value %v": "",
+    "[][]cmd/compile/internal/ssa.SlotID %v": "",
     "[]byte %s": "",
     "[]byte %x": "",
     "[]cmd/compile/internal/ssa.Edge %v": "",
     "[]cmd/compile/internal/ssa.ID %v": "",
-    "[]cmd/compile/internal/ssa.VarLocList %v": "",
     "[]cmd/compile/internal/syntax.token %s": "",
     "[]string %v": "",
     "bool %v": "",
@@ -637,18 +634,17 @@ var knownFormats = map[string]string{
     "cmd/compile/internal/gc.Val %v": "",
     "cmd/compile/internal/gc.fmtMode %d": "",
     "cmd/compile/internal/gc.initKind %d": "",
-    "cmd/compile/internal/gc.locID %v": "",
     "cmd/compile/internal/ssa.BranchPrediction %d": "",
     "cmd/compile/internal/ssa.Edge %v": "",
     "cmd/compile/internal/ssa.GCNode %v": "",
     "cmd/compile/internal/ssa.ID %d": "",
     "cmd/compile/internal/ssa.ID %v": "",
     "cmd/compile/internal/ssa.LocalSlot %s": "",
+    "cmd/compile/internal/ssa.LocalSlot %v": "",
     "cmd/compile/internal/ssa.Location %s": "",
     "cmd/compile/internal/ssa.Op %s": "",
     "cmd/compile/internal/ssa.Op %v": "",
     "cmd/compile/internal/ssa.ValAndOff %s": "",
-    "cmd/compile/internal/ssa.VarLocList %v": "",
     "cmd/compile/internal/ssa.rbrank %d": "",
     "cmd/compile/internal/ssa.regMask %d": "",
     "cmd/compile/internal/ssa.register %d": "",
@@ -663,7 +659,6 @@ var knownFormats = map[string]string{
     "cmd/compile/internal/types.EType %d": "",
     "cmd/compile/internal/types.EType %s": "",
    "cmd/compile/internal/types.EType %v": "",
-    "cmd/internal/dwarf.Location %#v": "",
     "cmd/internal/src.Pos %s": "",
     "cmd/internal/src.Pos %v": "",
     "error %v": "",
(A large diff for one file is collapsed here and not shown.)
@@ -4652,15 +4652,14 @@ func genssa(f *ssa.Func, pp *Progs) {
     s.ScratchFpMem = e.scratchFpMem
-    logLocationLists := Debug_locationlist != 0
     if Ctxt.Flag_locationlists {
-        e.curfn.Func.DebugInfo = ssa.BuildFuncDebug(f, logLocationLists)
         valueToProgAfter = make([]*obj.Prog, f.NumValues())
     }
     // Emit basic blocks
     for i, b := range f.Blocks {
         s.bstart[b.ID] = s.pp.next
         // Emit values in block
         thearch.SSAMarkMoves(&s, b)
         for _, v := range b.Values {
@@ -4698,8 +4697,6 @@ func genssa(f *ssa.Func, pp *Progs) {
                 }
             case ssa.OpPhi:
                 CheckLoweredPhi(v)
-            case ssa.OpRegKill:
-                // nothing to do
             default:
                 // let the backend handle it
                 thearch.SSAGenValue(&s, v)
@@ -4708,12 +4705,14 @@ func genssa(f *ssa.Func, pp *Progs) {
             if Ctxt.Flag_locationlists {
                 valueToProgAfter[v.ID] = s.pp.next
             }
             if logProgs {
                 for ; x != s.pp.next; x = x.Link {
                     progToValue[x] = v
                 }
             }
         }
         // Emit control flow instructions for block
         var next *ssa.Block
         if i < len(f.Blocks)-1 && Debug['N'] == 0 {
@@ -4734,41 +4733,19 @@
         }
     }
     if Ctxt.Flag_locationlists {
-        for i := range f.Blocks {
-            blockDebug := e.curfn.Func.DebugInfo.Blocks[i]
-            for _, locList := range blockDebug.Variables {
-                for _, loc := range locList.Locations {
-                    if loc.Start == ssa.BlockStart {
-                        loc.StartProg = s.bstart[f.Blocks[i].ID]
-                    } else {
-                        loc.StartProg = valueToProgAfter[loc.Start.ID]
-                    }
-                    if loc.End == nil {
-                        Fatalf("empty loc %v compiling %v", loc, f.Name)
-                    }
-
-                    if loc.End == ssa.BlockEnd {
-                        // If this variable was live at the end of the block, it should be
-                        // live over the control flow instructions. Extend it up to the
-                        // beginning of the next block.
-                        // If this is the last block, then there's no Prog to use for it, and
-                        // EndProg is unset.
-                        if i < len(f.Blocks)-1 {
-                            loc.EndProg = s.bstart[f.Blocks[i+1].ID]
-                        }
-                    } else {
-                        // Advance the "end" forward by one; the end-of-range doesn't take effect
-                        // until the instruction actually executes.
-                        loc.EndProg = valueToProgAfter[loc.End.ID].Link
-                        if loc.EndProg == nil {
-                            Fatalf("nil loc.EndProg compiling %v, loc=%v", f.Name, loc)
-                        }
-                    }
-                    if !logLocationLists {
-                        loc.Start = nil
-                        loc.End = nil
-                    }
-                }
-            }
-        }
+        e.curfn.Func.DebugInfo = ssa.BuildFuncDebug(Ctxt, f, Debug_locationlist > 1, stackOffset)
+        bstart := s.bstart
+        // Note that at this moment, Prog.Pc is a sequence number; it's
+        // not a real PC until after assembly, so this mapping has to
+        // be done later.
+        e.curfn.Func.DebugInfo.GetPC = func(b, v ssa.ID) int64 {
+            switch v {
+            case ssa.BlockStart.ID:
+                return int64(bstart[b].Pc)
+            case ssa.BlockEnd.ID:
+                return int64(e.curfn.Func.lsym.Size)
+            default:
+                return int64(valueToProgAfter[v].Pc)
+            }
+        }
     }
@@ -14,11 +14,6 @@ type Cache struct {
     blocks [200]Block
     locs   [2000]Location

-    // Storage for DWARF variable locations. Lazily allocated
-    // since location lists are off by default.
-    varLocs   []VarLoc
-    curVarLoc int
-
     // Reusable stackAllocState.
     // See stackalloc.go's {new,put}StackAllocState.
     stackAllocState *stackAllocState
@@ -43,21 +38,4 @@ func (c *Cache) Reset() {
     for i := range xl {
         xl[i] = nil
     }
-    xvl := c.varLocs[:c.curVarLoc]
-    for i := range xvl {
-        xvl[i] = VarLoc{}
-    }
-    c.curVarLoc = 0
-}
-
-func (c *Cache) NewVarLoc() *VarLoc {
-    if c.varLocs == nil {
-        c.varLocs = make([]VarLoc, 4000)
-    }
-    if c.curVarLoc == len(c.varLocs) {
-        return &VarLoc{}
-    }
-    vl := &c.varLocs[c.curVarLoc]
-    c.curVarLoc++
-    return vl
 }
@@ -465,10 +465,6 @@ func memCheck(f *Func) {
                 if seenNonPhi {
                     f.Fatalf("phi after non-phi @ %s: %s", b, v)
                 }
-            case OpRegKill:
-                if f.RegAlloc == nil {
-                    f.Fatalf("RegKill seen before register allocation @ %s: %s", b, v)
-                }
             default:
                 seenNonPhi = true
             }
(A large diff for one file is collapsed here and not shown.)
@@ -450,7 +450,6 @@ var genericOps = []opData{
     {name: "VarKill", argLength: 1, aux: "Sym", symEffect: "None"}, // aux is a *gc.Node of a variable that is known to be dead. arg0=mem, returns mem
     {name: "VarLive", argLength: 1, aux: "Sym", symEffect: "Read"}, // aux is a *gc.Node of a variable that must be kept live. arg0=mem, returns mem
     {name: "KeepAlive", argLength: 2, typ: "Mem"}, // arg[0] is a value that must be kept alive until this mark. arg[1]=mem, returns mem
-    {name: "RegKill"}, // regalloc has determined that the value in this register is dead

     // Ops for breaking 64-bit operations on 32-bit architectures
     {name: "Int64Make", argLength: 2, typ: "UInt64"}, // arg0=hi, arg1=lo
@@ -2017,7 +2017,6 @@
     OpVarKill
     OpVarLive
     OpKeepAlive
-    OpRegKill
     OpInt64Make
     OpInt64Hi
     OpInt64Lo
@@ -24081,11 +24080,6 @@ var opcodeTable = [...]opInfo{
         argLen:  2,
         generic: true,
     },
-    {
-        name:    "RegKill",
-        argLen:  0,
-        generic: true,
-    },
     {
         name:    "Int64Make",
         argLen:  2,
@@ -242,9 +242,6 @@ type regAllocState struct {
     // current state of each (preregalloc) Value
     values []valState

-    // names associated with each Value
-    valueNames [][]LocalSlot
-
     // ID of SP, SB values
     sp, sb ID
@@ -303,13 +300,6 @@ type startReg struct {
 // freeReg frees up register r. Any current user of r is kicked out.
 func (s *regAllocState) freeReg(r register) {
-    s.freeOrResetReg(r, false)
-}
-
-// freeOrResetReg frees up register r. Any current user of r is kicked out.
-// resetting indicates that the operation is only for bookkeeping,
-// e.g. when clearing out state upon entry to a new block.
-func (s *regAllocState) freeOrResetReg(r register, resetting bool) {
     v := s.regs[r].v
     if v == nil {
         s.f.Fatalf("tried to free an already free register %d\n", r)
@@ -319,16 +309,6 @@ func (s *regAllocState) freeOrResetReg(r register, resetting bool) {
     if s.f.pass.debug > regDebug {
         fmt.Printf("freeReg %s (dump %s/%s)\n", &s.registers[r], v, s.regs[r].c)
     }
-    if !resetting && s.f.Config.ctxt.Flag_locationlists && len(s.valueNames[v.ID]) != 0 {
-        kill := s.curBlock.NewValue0(src.NoXPos, OpRegKill, types.TypeVoid)
-        for int(kill.ID) >= len(s.orig) {
-            s.orig = append(s.orig, nil)
-        }
-        for _, name := range s.valueNames[v.ID] {
-            s.f.NamedValues[name] = append(s.f.NamedValues[name], kill)
-        }
-        s.f.setHome(kill, &s.registers[r])
-    }
     s.regs[r] = regState{}
     s.values[v.ID].regs &^= regMask(1) << r
     s.used &^= regMask(1) << r
@@ -613,17 +593,6 @@ func (s *regAllocState) init(f *Func) {
     s.values = make([]valState, f.NumValues())
     s.orig = make([]*Value, f.NumValues())
     s.copies = make(map[*Value]bool)
-    if s.f.Config.ctxt.Flag_locationlists {
-        s.valueNames = make([][]LocalSlot, f.NumValues())
-        for slot, values := range f.NamedValues {
-            if isSynthetic(&slot) {
-                continue
-            }
-            for _, value := range values {
-                s.valueNames[value.ID] = append(s.valueNames[value.ID], slot)
-            }
-        }
-    }
     for _, b := range f.Blocks {
         for _, v := range b.Values {
             if !v.Type.IsMemory() && !v.Type.IsVoid() && !v.Type.IsFlags() && !v.Type.IsTuple() {
@@ -717,9 +686,7 @@ func (s *regAllocState) liveAfterCurrentInstruction(v *Value) bool {
 // Sets the state of the registers to that encoded in regs.
 func (s *regAllocState) setState(regs []endReg) {
-    for s.used != 0 {
-        s.freeOrResetReg(pickReg(s.used), true)
-    }
+    s.freeRegs(s.used)
     for _, x := range regs {
         s.assignReg(x.r, x.v, x.c)
     }
@@ -1035,7 +1002,7 @@ func (s *regAllocState) regalloc(f *Func) {
             pidx := e.i
             for _, v := range succ.Values {
                 if v.Op != OpPhi {
-                    continue
+                    break
                 }
                 if !s.values[v.ID].needReg {
                     continue
@@ -1598,9 +1565,6 @@ func (s *regAllocState) placeSpills() {
     for _, b := range f.Blocks {
         var m regMask
         for _, v := range b.Values {
-            if v.Op == OpRegKill {
-                continue
-            }
             if v.Op != OpPhi {
                 break
             }
@@ -1711,7 +1675,7 @@
     for _, b := range f.Blocks {
         nphi := 0
         for _, v := range b.Values {
-            if v.Op != OpRegKill && v.Op != OpPhi {
+            if v.Op != OpPhi {
                 break
             }
             nphi++
@@ -1832,9 +1796,6 @@ func (e *edgeState) setup(idx int, srcReg []endReg, dstReg []startReg, stacklive
     }
     // Phis need their args to end up in a specific location.
     for _, v := range e.b.Values {
-        if v.Op == OpRegKill {
-            continue
-        }
         if v.Op != OpPhi {
             break
         }
@@ -2094,16 +2055,6 @@ func (e *edgeState) erase(loc Location) {
                 fmt.Printf("v%d no longer available in %s:%s\n", vid, loc, c)
             }
             a[i], a = a[len(a)-1], a[:len(a)-1]
-            if e.s.f.Config.ctxt.Flag_locationlists {
-                if _, isReg := loc.(*Register); isReg && int(c.ID) < len(e.s.valueNames) && len(e.s.valueNames[c.ID]) != 0 {
-                    kill := e.p.NewValue0(src.NoXPos, OpRegKill, types.TypeVoid)
-                    e.s.f.setHome(kill, loc)
-                    for _, name := range e.s.valueNames[c.ID] {
-                        e.s.f.NamedValues[name] = append(e.s.f.NamedValues[name], kill)
-                    }
-                }
-            }
             break
         }
     }
@@ -14,7 +14,7 @@ dy = <Optimized out, as expected>
 dx = 2
 dy = 2
 63: hist := make([]int, 7) //gdb-opt=(dx/O,dy/O) // TODO sink is missing if this code is in 'test' instead of 'main'
-dx = <Optimized out, as expected>
+dx = 2
 dy = <Optimized out, as expected>
 64: var reader io.Reader = strings.NewReader(cannedInput) //gdb-dbg=(hist/A) // TODO cannedInput/A is missing if this code is in 'test' instead of 'main'
 65: if len(os.Args) > 1 {
@@ -116,11 +116,6 @@ scanner = (struct bufio.Scanner *) <A>
 a = 0
 n = 0
 t = 0
-88: continue
-87: if a == 0 { //gdb-opt=(a,n,t)
-a = 3
-n = 0
-t = 0
 92: fmt.Fprintf(os.Stderr, "%d\t%d\t%d\t%d\t%d\n", i, a, n, i*a, t) //gdb-dbg=(n,i,t)
 91: n += a
 92: fmt.Fprintf(os.Stderr, "%d\t%d\t%d\t%d\t%d\n", i, a, n, i*a, t) //gdb-dbg=(n,i,t)
@@ -147,11 +142,6 @@ t = 3
 a = 0
 n = 6
 t = 9
-88: continue
-87: if a == 0 { //gdb-opt=(a,n,t)
-a = 2
-n = 6
-t = 9
 92: fmt.Fprintf(os.Stderr, "%d\t%d\t%d\t%d\t%d\n", i, a, n, i*a, t) //gdb-dbg=(n,i,t)
 91: n += a
 92: fmt.Fprintf(os.Stderr, "%d\t%d\t%d\t%d\t%d\n", i, a, n, i*a, t) //gdb-dbg=(n,i,t)
@@ -178,5 +168,4 @@ t = 17
 a = 0
 n = 9
 t = 22
-88: continue
 98: }
@@ -44,23 +44,6 @@ type Sym interface {
     Len() int64
 }

-// A Location represents a variable's location at a particular PC range.
-// It becomes a location list entry in the DWARF.
-type Location struct {
-    StartPC, EndPC int64
-    Pieces         []Piece
-}
-
-// A Piece represents the location of a particular part of a variable.
-// It becomes part of a location list entry (a DW_OP_piece) in the DWARF.
-type Piece struct {
-    Length      int64
-    StackOffset int32
-    RegNum      int16
-    Missing     bool
-    OnStack     bool // if true, RegNum is unset.
-}
-
 // A Var represents a local variable or a function parameter.
 type Var struct {
     Name string
@@ -68,15 +51,17 @@ type Var struct {
     IsReturnValue bool
     IsInlFormal   bool
     StackOffset   int32
-    LocationList  []Location
-    Scope         int32
-    Type          Sym
-    DeclFile      string
-    DeclLine      uint
-    DeclCol       uint
-    InlIndex      int32 // subtract 1 to form real index into InlTree
-    ChildIndex    int32 // child DIE index in abstract function
-    IsInAbstract  bool  // variable exists in abstract function
+    // This package can't use the ssa package, so it can't mention ssa.FuncDebug,
+    // so indirect through a closure.
+    PutLocationList func(listSym, startPC Sym)
+    Scope           int32
+    Type            Sym
+    DeclFile        string
+    DeclLine        uint
+    DeclCol         uint
+    InlIndex        int32 // subtract 1 to form real index into InlTree
+    ChildIndex      int32 // child DIE index in abstract function
+    IsInAbstract    bool  // variable exists in abstract function
 }

 // A Scope represents a lexical scope. All variables declared within a
@@ -1360,10 +1345,10 @@ func determineVarAbbrev(v *Var, fnabbrev int) (int, bool, bool) {
     // convert to an inline abbreviation and emit an empty location.
     missing := false
     switch {
-    case abbrev == DW_ABRV_AUTO_LOCLIST && len(v.LocationList) == 0:
+    case abbrev == DW_ABRV_AUTO_LOCLIST && v.PutLocationList == nil:
         missing = true
         abbrev = DW_ABRV_AUTO
-    case abbrev == DW_ABRV_PARAM_LOCLIST && len(v.LocationList) == 0:
+    case abbrev == DW_ABRV_PARAM_LOCLIST && v.PutLocationList == nil:
         missing = true
         abbrev = DW_ABRV_PARAM
     }
@@ -1470,7 +1455,7 @@ func putvar(ctxt Context, s *FnState, v *Var, absfn Sym, fnabbrev, inlIndex int,
     if abbrevUsesLoclist(abbrev) {
         putattr(ctxt, s.Info, abbrev, DW_FORM_sec_offset, DW_CLS_PTR, int64(s.Loc.Len()), s.Loc)
-        addLocList(ctxt, s.Loc, s.StartPC, v, encbuf)
+        v.PutLocationList(s.Loc, s.StartPC)
     } else {
         loc := encbuf[:0]
         switch {
@@ -1488,45 +1473,6 @@ func putvar(ctxt Context, s *FnState, v *Var, absfn Sym, fnabbrev, inlIndex int,
     // Var has no children => no terminator
 }

-func addLocList(ctxt Context, listSym, startPC Sym, v *Var, encbuf []byte) {
-    // Base address entry: max ptr followed by the base address.
-    ctxt.AddInt(listSym, ctxt.PtrSize(), ^0)
-    ctxt.AddAddress(listSym, startPC, 0)
-    for _, entry := range v.LocationList {
-        ctxt.AddInt(listSym, ctxt.PtrSize(), entry.StartPC)
-        ctxt.AddInt(listSym, ctxt.PtrSize(), entry.EndPC)
-        locBuf := encbuf[:0]
-        for _, piece := range entry.Pieces {
-            if !piece.Missing {
-                if piece.OnStack {
-                    if piece.StackOffset == 0 {
-                        locBuf = append(locBuf, DW_OP_call_frame_cfa)
-                    } else {
-                        locBuf = append(locBuf, DW_OP_fbreg)
-                        locBuf = AppendSleb128(locBuf, int64(piece.StackOffset))
-                    }
-                } else {
-                    if piece.RegNum < 32 {
-                        locBuf = append(locBuf, DW_OP_reg0+byte(piece.RegNum))
-                    } else {
-                        locBuf = append(locBuf, DW_OP_regx)
-                        locBuf = AppendUleb128(locBuf, uint64(piece.RegNum))
-                    }
-                }
-            }
-            if len(entry.Pieces) > 1 {
-                locBuf = append(locBuf, DW_OP_piece)
-                locBuf = AppendUleb128(locBuf, uint64(piece.Length))
-            }
-        }
-        ctxt.AddInt(listSym, 2, int64(len(locBuf)))
-        ctxt.AddBytes(listSym, locBuf)
-    }
-    // End list
-    ctxt.AddInt(listSym, ctxt.PtrSize(), 0)
-    ctxt.AddInt(listSym, ctxt.PtrSize(), 0)
-}
-
 // VarsByOffset attaches the methods of sort.Interface to []*Var,
 // sorting in increasing StackOffset.
 type VarsByOffset []*Var