Commit 382b44eb authored by Russ Cox

[dev.cc] cmd/5g etc: code cleanup: delay var decls and eliminate dead code

Ran rsc.io/grind rev 6f0e601 on the source files.

The cleanups move var declarations as close to their use
as possible, splitting disjoint uses of a single var into separate
variables. They also remove dead code (especially in
func sudoaddable), which helps with the var moving.
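As a rough sketch of the kind of rewrite this describes (the function
below is invented for illustration; it is not taken from the CL or from
rsc.io/grind):

package main

import "fmt"

// sumAndMaxBefore mirrors the pre-cleanup style: every variable is
// declared at the top of the function, and n is reused for two
// unrelated purposes.
func sumAndMaxBefore(xs []int) (int, int) {
	var n int
	var total int
	var maxVal int

	n = len(xs) // first role of n: the slice length
	for i := 0; i < n; i++ {
		total += xs[i]
	}

	n = 0 // second, disjoint role of n: the running maximum
	for _, x := range xs {
		if x > n {
			n = x
		}
	}
	maxVal = n
	return total, maxVal
}

// sumAndMaxAfter mirrors the post-cleanup style: declarations move to
// the point of use and the two roles of n become two variables.
func sumAndMaxAfter(xs []int) (int, int) {
	total := 0
	for i := 0; i < len(xs); i++ {
		total += xs[i]
	}

	maxVal := 0
	for _, x := range xs {
		if x > maxVal {
			maxVal = x
		}
	}
	return total, maxVal
}

func main() {
	fmt.Println(sumAndMaxBefore([]int{3, 1, 4, 1, 5}))
	fmt.Println(sumAndMaxAfter([]int{3, 1, 4, 1, 5}))
}

The second form is what the diffs below look like: the up-front
declaration block disappears and each var reappears immediately before
its first use.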

There's more cleanup to come, but this alone cuts the
time spent compiling html/template on my 2013 MacBook Pro
from 3.1 seconds to 2.3 seconds.

Change-Id: I4de499f47b1dd47a560c310bbcde6b08d425cfd6
Reviewed-on: https://go-review.googlesource.com/5637
Reviewed-by: Rob Pike <r@golang.org>
parent 3af0d791
@@ -16,61 +16,43 @@ import "cmd/internal/gc"
  * return 1 on success, 0 if op not handled.
  */
 func cgen64(n *gc.Node, res *gc.Node) {
-	var t1 gc.Node
-	var t2 gc.Node
-	var l *gc.Node
-	var r *gc.Node
-	var lo1 gc.Node
-	var lo2 gc.Node
-	var hi1 gc.Node
-	var hi2 gc.Node
-	var al gc.Node
-	var ah gc.Node
-	var bl gc.Node
-	var bh gc.Node
-	var cl gc.Node
-	var ch gc.Node
-	var s gc.Node
-	var n1 gc.Node
-	var creg gc.Node
-	var p1 *obj.Prog
-	var p2 *obj.Prog
-	var p3 *obj.Prog
-	var p4 *obj.Prog
-	var p5 *obj.Prog
-	var p6 *obj.Prog
-	var v uint64
 	if res.Op != gc.OINDREG && res.Op != gc.ONAME {
 		gc.Dump("n", n)
 		gc.Dump("res", res)
 		gc.Fatal("cgen64 %v of %v", gc.Oconv(int(n.Op), 0), gc.Oconv(int(res.Op), 0))
 	}
-	l = n.Left
+	l := n.Left
+	var t1 gc.Node
 	if l.Addable == 0 {
 		gc.Tempname(&t1, l.Type)
 		cgen(l, &t1)
 		l = &t1
 	}
+	var hi1 gc.Node
+	var lo1 gc.Node
 	split64(l, &lo1, &hi1)
 	switch n.Op {
 	default:
 		gc.Fatal("cgen64 %v", gc.Oconv(int(n.Op), 0))
 	case gc.OMINUS:
+		var lo2 gc.Node
+		var hi2 gc.Node
 		split64(res, &lo2, &hi2)
 		regalloc(&t1, lo1.Type, nil)
+		var al gc.Node
 		regalloc(&al, lo1.Type, nil)
+		var ah gc.Node
 		regalloc(&ah, hi1.Type, nil)
 		gins(arm.AMOVW, &lo1, &al)
 		gins(arm.AMOVW, &hi1, &ah)
 		gmove(ncon(0), &t1)
-		p1 = gins(arm.ASUB, &al, &t1)
+		p1 := gins(arm.ASUB, &al, &t1)
 		p1.Scond |= arm.C_SBIT
 		gins(arm.AMOVW, &t1, &lo2)

@@ -89,7 +71,10 @@ func cgen64(n *gc.Node, res *gc.Node) {
 		regalloc(&t1, lo1.Type, nil)
 		gmove(ncon(^uint32(0)), &t1)
+		var lo2 gc.Node
+		var hi2 gc.Node
 		split64(res, &lo2, &hi2)
+		var n1 gc.Node
 		regalloc(&n1, lo1.Type, nil)
 		gins(arm.AMOVW, &lo1, &n1)

@@ -121,19 +106,24 @@ func cgen64(n *gc.Node, res *gc.Node) {
 	}
 	// setup for binary operators
-	r = n.Right
+	r := n.Right
 	if r != nil && r.Addable == 0 {
+		var t2 gc.Node
 		gc.Tempname(&t2, r.Type)
 		cgen(r, &t2)
 		r = &t2
 	}
+	var hi2 gc.Node
+	var lo2 gc.Node
 	if gc.Is64(r.Type) {
 		split64(r, &lo2, &hi2)
 	}
+	var al gc.Node
 	regalloc(&al, lo1.Type, nil)
+	var ah gc.Node
 	regalloc(&ah, hi1.Type, nil)
 	// Do op. Leave result in ah:al.

@@ -143,14 +133,16 @@ func cgen64(n *gc.Node, res *gc.Node) {
 	// TODO: Constants
 	case gc.OADD:
+		var bl gc.Node
 		regalloc(&bl, gc.Types[gc.TPTR32], nil)
+		var bh gc.Node
 		regalloc(&bh, gc.Types[gc.TPTR32], nil)
 		gins(arm.AMOVW, &hi1, &ah)
 		gins(arm.AMOVW, &lo1, &al)
 		gins(arm.AMOVW, &hi2, &bh)
 		gins(arm.AMOVW, &lo2, &bl)
-		p1 = gins(arm.AADD, &bl, &al)
+		p1 := gins(arm.AADD, &bl, &al)
 		p1.Scond |= arm.C_SBIT
 		gins(arm.AADC, &bh, &ah)
 		regfree(&bl)

@@ -158,14 +150,16 @@ func cgen64(n *gc.Node, res *gc.Node) {
 	// TODO: Constants.
 	case gc.OSUB:
+		var bl gc.Node
 		regalloc(&bl, gc.Types[gc.TPTR32], nil)
+		var bh gc.Node
 		regalloc(&bh, gc.Types[gc.TPTR32], nil)
 		gins(arm.AMOVW, &lo1, &al)
 		gins(arm.AMOVW, &hi1, &ah)
 		gins(arm.AMOVW, &lo2, &bl)
 		gins(arm.AMOVW, &hi2, &bh)
-		p1 = gins(arm.ASUB, &bl, &al)
+		p1 := gins(arm.ASUB, &bl, &al)
 		p1.Scond |= arm.C_SBIT
 		gins(arm.ASBC, &bh, &ah)
 		regfree(&bl)

@@ -173,10 +167,14 @@ func cgen64(n *gc.Node, res *gc.Node) {
 	// TODO(kaib): this can be done with 4 regs and does not need 6
 	case gc.OMUL:
+		var bl gc.Node
 		regalloc(&bl, gc.Types[gc.TPTR32], nil)
+		var bh gc.Node
 		regalloc(&bh, gc.Types[gc.TPTR32], nil)
+		var cl gc.Node
 		regalloc(&cl, gc.Types[gc.TPTR32], nil)
+		var ch gc.Node
 		regalloc(&ch, gc.Types[gc.TPTR32], nil)
 		// load args into bh:bl and bh:bl.

@@ -187,7 +185,7 @@ func cgen64(n *gc.Node, res *gc.Node) {
 		gins(arm.AMOVW, &lo2, &cl)
 		// bl * cl -> ah al
-		p1 = gins(arm.AMULLU, nil, nil)
+		p1 := gins(arm.AMULLU, nil, nil)
 		p1.From.Type = obj.TYPE_REG
 		p1.From.Reg = bl.Val.U.Reg

@@ -239,9 +237,11 @@ func cgen64(n *gc.Node, res *gc.Node) {
 	//	shld hi:lo, c
 	//	shld lo:t, c
 	case gc.OLROT:
-		v = uint64(gc.Mpgetfix(r.Val.U.Xval))
+		v := uint64(gc.Mpgetfix(r.Val.U.Xval))
+		var bl gc.Node
 		regalloc(&bl, lo1.Type, nil)
+		var bh gc.Node
 		regalloc(&bh, hi1.Type, nil)
 		if v >= 32 {
 			// reverse during load to do the first 32 bits of rotate

@@ -274,13 +274,24 @@ func cgen64(n *gc.Node, res *gc.Node) {
 		regfree(&bh)
 	case gc.OLSH:
+		var bl gc.Node
 		regalloc(&bl, lo1.Type, nil)
+		var bh gc.Node
 		regalloc(&bh, hi1.Type, nil)
 		gins(arm.AMOVW, &hi1, &bh)
 		gins(arm.AMOVW, &lo1, &bl)
+		var p6 *obj.Prog
+		var s gc.Node
+		var n1 gc.Node
+		var creg gc.Node
+		var p1 *obj.Prog
+		var p2 *obj.Prog
+		var p3 *obj.Prog
+		var p4 *obj.Prog
+		var p5 *obj.Prog
 		if r.Op == gc.OLITERAL {
-			v = uint64(gc.Mpgetfix(r.Val.U.Xval))
+			v := uint64(gc.Mpgetfix(r.Val.U.Xval))
 			if v >= 64 {
 				// TODO(kaib): replace with gins(AMOVW, nodintconst(0), &al)
 				// here and below (verify it optimizes to EOR)

@@ -316,6 +327,8 @@ func cgen64(n *gc.Node, res *gc.Node) {
 			regalloc(&creg, gc.Types[gc.TUINT32], nil)
 			if gc.Is64(r.Type) {
 				// shift is >= 1<<32
+				var cl gc.Node
+				var ch gc.Node
 				split64(r, &cl, &ch)
 				gmove(&ch, &s)

@@ -422,13 +435,24 @@ func cgen64(n *gc.Node, res *gc.Node) {
 		regfree(&bh)
 	case gc.ORSH:
+		var bl gc.Node
 		regalloc(&bl, lo1.Type, nil)
+		var bh gc.Node
 		regalloc(&bh, hi1.Type, nil)
 		gins(arm.AMOVW, &hi1, &bh)
 		gins(arm.AMOVW, &lo1, &bl)
+		var p4 *obj.Prog
+		var p5 *obj.Prog
+		var n1 gc.Node
+		var p6 *obj.Prog
+		var s gc.Node
+		var p1 *obj.Prog
+		var p2 *obj.Prog
+		var creg gc.Node
+		var p3 *obj.Prog
 		if r.Op == gc.OLITERAL {
-			v = uint64(gc.Mpgetfix(r.Val.U.Xval))
+			v := uint64(gc.Mpgetfix(r.Val.U.Xval))
 			if v >= 64 {
 				if bh.Type.Etype == gc.TINT32 {
 					// MOVW	bh->31, al

@@ -487,10 +511,13 @@ func cgen64(n *gc.Node, res *gc.Node) {
 			regalloc(&creg, gc.Types[gc.TUINT32], nil)
 			if gc.Is64(r.Type) {
 				// shift is >= 1<<32
+				var ch gc.Node
+				var cl gc.Node
 				split64(r, &cl, &ch)
 				gmove(&ch, &s)
 				gins(arm.ATST, &s, nil)
+				var p1 *obj.Prog
 				if bh.Type.Etype == gc.TINT32 {
 					p1 = gshift(arm.AMOVW, &bh, arm.SHIFT_AR, 31, &ah)
 				} else {

@@ -578,12 +605,12 @@ func cgen64(n *gc.Node, res *gc.Node) {
 			if bh.Type.Etype == gc.TINT32 {
 				// MOVW	bh->(s-32), al
-				p1 = gregshift(arm.AMOVW, &bh, arm.SHIFT_AR, &s, &al)
+				p1 := gregshift(arm.AMOVW, &bh, arm.SHIFT_AR, &s, &al)
 				p1.Scond = arm.C_SCOND_LO
 			} else {
 				// MOVW	bh>>(v-32), al
-				p1 = gregshift(arm.AMOVW, &bh, arm.SHIFT_LR, &s, &al)
+				p1 := gregshift(arm.AMOVW, &bh, arm.SHIFT_LR, &s, &al)
 				p1.Scond = arm.C_SCOND_LO
 			}

@@ -708,6 +735,7 @@ func cgen64(n *gc.Node, res *gc.Node) {
 	case gc.OXOR,
 		gc.OAND,
 		gc.OOR:
+		var n1 gc.Node
 		regalloc(&n1, lo1.Type, nil)
 		gins(arm.AMOVW, &lo1, &al)

@@ -746,15 +774,13 @@ func cmp64(nl *gc.Node, nr *gc.Node, op int, likely int, to *obj.Prog) {
 	var hi2 gc.Node
 	var r1 gc.Node
 	var r2 gc.Node
-	var br *obj.Prog
-	var t *gc.Type
 	split64(nl, &lo1, &hi1)
 	split64(nr, &lo2, &hi2)
 	// compare most significant word;
 	// if they differ, we're done.
-	t = hi1.Type
+	t := hi1.Type
 	regalloc(&r1, gc.Types[gc.TINT32], nil)
 	regalloc(&r2, gc.Types[gc.TINT32], nil)

@@ -764,7 +790,7 @@ func cmp64(nl *gc.Node, nr *gc.Node, op int, likely int, to *obj.Prog) {
 	regfree(&r1)
 	regfree(&r2)
-	br = nil
+	br := (*obj.Prog)(nil)
 	switch op {
 	default:
 		gc.Fatal("cmp64 %v %v", gc.Oconv(int(op), 0), gc.Tconv(t, 0))
@@ -41,17 +41,15 @@ var gactive uint32
 // UNUSED
 func peep(firstp *obj.Prog) {
-	var r *gc.Flow
-	var g *gc.Graph
-	var p *obj.Prog
-	var t int
-	g = gc.Flowstart(firstp, nil)
+	g := (*gc.Graph)(gc.Flowstart(firstp, nil))
 	if g == nil {
 		return
 	}
 	gactive = 0
+	var r *gc.Flow
+	var p *obj.Prog
+	var t int
 loop1:
 	if gc.Debug['P'] != 0 && gc.Debug['v'] != 0 {
 		gc.Dumpit("loop1", g.Start, 0)

@@ -121,7 +119,7 @@ loop1:
 		goto loop1
 	}
-	for r = g.Start; r != nil; r = r.Link {
+	for r := (*gc.Flow)(g.Start); r != nil; r = r.Link {
 		p = r.Prog
 		switch p.As {
 		/*

@@ -141,7 +139,7 @@ loop1:
 		}
 	}
-	for r = g.Start; r != nil; r = r.Link {
+	for r := (*gc.Flow)(g.Start); r != nil; r = r.Link {
 		p = r.Prog
 		switch p.As {
 		case arm.AMOVW,

@@ -250,22 +248,17 @@ func regtyp(a *obj.Addr) bool {
  * will be eliminated by copy propagation.
  */
 func subprop(r0 *gc.Flow) bool {
-	var p *obj.Prog
-	var v1 *obj.Addr
-	var v2 *obj.Addr
-	var r *gc.Flow
-	var t int
-	var info gc.ProgInfo
-	p = r0.Prog
-	v1 = &p.From
+	p := (*obj.Prog)(r0.Prog)
+	v1 := (*obj.Addr)(&p.From)
 	if !regtyp(v1) {
 		return false
 	}
-	v2 = &p.To
+	v2 := (*obj.Addr)(&p.To)
 	if !regtyp(v2) {
 		return false
 	}
+	var r *gc.Flow
+	var info gc.ProgInfo
 	for r = gc.Uniqp(r0); r != nil; r = gc.Uniqp(r) {
 		if gc.Uniqs(r) == nil {
 			break

@@ -332,7 +325,7 @@ gotit:
 		}
 	}
-	t = int(v1.Reg)
+	t := int(int(v1.Reg))
 	v1.Reg = v2.Reg
 	v2.Reg = int16(t)
 	if gc.Debug['P'] != 0 {

@@ -354,13 +347,9 @@ gotit:
  * set v2	return success
  */
 func copyprop(g *gc.Graph, r0 *gc.Flow) bool {
-	var p *obj.Prog
-	var v1 *obj.Addr
-	var v2 *obj.Addr
-	p = r0.Prog
-	v1 = &p.From
-	v2 = &p.To
+	p := (*obj.Prog)(r0.Prog)
+	v1 := (*obj.Addr)(&p.From)
+	v2 := (*obj.Addr)(&p.To)
 	if copyas(v1, v2) {
 		return true
 	}

@@ -369,9 +358,6 @@ func copyprop(g *gc.Graph, r0 *gc.Flow) bool {
 }
 func copy1(v1 *obj.Addr, v2 *obj.Addr, r *gc.Flow, f int) bool {
-	var t int
-	var p *obj.Prog
 	if uint32(r.Active) == gactive {
 		if gc.Debug['P'] != 0 {
 			fmt.Printf("act set; return 1\n")

@@ -383,6 +369,8 @@ func copy1(v1 *obj.Addr, v2 *obj.Addr, r *gc.Flow, f int) bool {
 	if gc.Debug['P'] != 0 {
 		fmt.Printf("copy %v->%v f=%d\n", gc.Ctxt.Dconv(v1), gc.Ctxt.Dconv(v2), f)
 	}
+	var t int
+	var p *obj.Prog
 	for ; r != nil; r = r.S1 {
 		p = r.Prog
 		if gc.Debug['P'] != 0 {

@@ -473,11 +461,10 @@ func copy1(v1 *obj.Addr, v2 *obj.Addr, r *gc.Flow, f int) bool {
  * The v1->v2 should be eliminated by copy propagation.
  */
 func constprop(c1 *obj.Addr, v1 *obj.Addr, r *gc.Flow) {
-	var p *obj.Prog
 	if gc.Debug['P'] != 0 {
 		fmt.Printf("constprop %v->%v\n", gc.Ctxt.Dconv(c1), gc.Ctxt.Dconv(v1))
 	}
+	var p *obj.Prog
 	for ; r != nil; r = r.S1 {
 		p = r.Prog
 		if gc.Debug['P'] != 0 {

@@ -527,17 +514,13 @@ func constprop(c1 *obj.Addr, v1 *obj.Addr, r *gc.Flow) {
  * MOVBS above can be a MOVBS, MOVBU, MOVHS or MOVHU.
  */
 func shortprop(r *gc.Flow) bool {
-	var p *obj.Prog
-	var p1 *obj.Prog
-	var r1 *gc.Flow
-	p = r.Prog
-	r1 = findpre(r, &p.From)
+	p := (*obj.Prog)(r.Prog)
+	r1 := (*gc.Flow)(findpre(r, &p.From))
 	if r1 == nil {
 		return false
 	}
-	p1 = r1.Prog
+	p1 := (*obj.Prog)(r1.Prog)
 	if p1.As == p.As {
 		// Two consecutive extensions.
 		goto gotit

@@ -583,15 +566,7 @@ gotit:
  * ..
 */
 func shiftprop(r *gc.Flow) bool {
-	var r1 *gc.Flow
-	var p *obj.Prog
-	var p1 *obj.Prog
-	var p2 *obj.Prog
-	var n int
-	var o int
-	var a obj.Addr
-	p = r.Prog
+	p := (*obj.Prog)(r.Prog)
 	if p.To.Type != obj.TYPE_REG {
 		if gc.Debug['P'] != 0 {
 			fmt.Printf("\tBOTCH: result not reg; FAILURE\n")

@@ -599,8 +574,8 @@ func shiftprop(r *gc.Flow) bool {
 		return false
 	}
-	n = int(p.To.Reg)
-	a = obj.Addr{}
+	n := int(int(p.To.Reg))
+	a := obj.Addr(obj.Addr{})
 	if p.Reg != 0 && p.Reg != p.To.Reg {
 		a.Type = obj.TYPE_REG
 		a.Reg = p.Reg

@@ -609,7 +584,8 @@ func shiftprop(r *gc.Flow) bool {
 	if gc.Debug['P'] != 0 {
 		fmt.Printf("shiftprop\n%v", p)
 	}
-	r1 = r
+	r1 := (*gc.Flow)(r)
+	var p1 *obj.Prog
 	for {
 		/* find first use of shift result; abort if shift operands or result are changed */
 		r1 = gc.Uniqs(r1)

@@ -736,9 +712,10 @@ func shiftprop(r *gc.Flow) bool {
 	}
 	/* check whether shift result is used subsequently */
-	p2 = p1
+	p2 := (*obj.Prog)(p1)
 	if int(p1.To.Reg) != n {
+		var p1 *obj.Prog
 		for {
 			r1 = gc.Uniqs(r1)
 			if r1 == nil {

@@ -773,7 +750,7 @@ func shiftprop(r *gc.Flow) bool {
 	/* make the substitution */
 	p2.From.Reg = 0
-	o = int(p.Reg)
+	o := int(int(p.Reg))
 	if o == 0 {
 		o = int(p.To.Reg)
 	}

@@ -870,14 +847,11 @@ func findinc(r *gc.Flow, r2 *gc.Flow, v *obj.Addr) *gc.Flow {
 }
 func nochange(r *gc.Flow, r2 *gc.Flow, p *obj.Prog) bool {
-	var a [3]obj.Addr
-	var i int
-	var n int
 	if r == r2 {
 		return true
 	}
-	n = 0
+	n := int(0)
+	var a [3]obj.Addr
 	if p.Reg != 0 && p.Reg != p.To.Reg {
 		a[n].Type = obj.TYPE_REG
 		a[n].Reg = p.Reg

@@ -900,6 +874,7 @@ func nochange(r *gc.Flow, r2 *gc.Flow, p *obj.Prog) bool {
 	if n == 0 {
 		return true
 	}
+	var i int
 	for ; r != nil && r != r2; r = gc.Uniqs(r) {
 		p = r.Prog
 		for i = 0; i < n; i++ {

@@ -939,9 +914,7 @@ func findu1(r *gc.Flow, v *obj.Addr) bool {
 }
 func finduse(g *gc.Graph, r *gc.Flow, v *obj.Addr) bool {
-	var r1 *gc.Flow
-	for r1 = g.Start; r1 != nil; r1 = r1.Link {
+	for r1 := (*gc.Flow)(g.Start); r1 != nil; r1 = r1.Link {
 		r1.Active = 0
 	}
 	return findu1(r, v)

@@ -961,19 +934,12 @@ func finduse(g *gc.Graph, r *gc.Flow, v *obj.Addr) bool {
  * MOVBU	R0<<0(R1),R0
 */
 func xtramodes(g *gc.Graph, r *gc.Flow, a *obj.Addr) bool {
-	var r1 *gc.Flow
-	var r2 *gc.Flow
-	var r3 *gc.Flow
-	var p *obj.Prog
-	var p1 *obj.Prog
-	var v obj.Addr
-	p = r.Prog
-	v = *a
+	p := (*obj.Prog)(r.Prog)
+	v := obj.Addr(*a)
 	v.Type = obj.TYPE_REG
-	r1 = findpre(r, &v)
+	r1 := (*gc.Flow)(findpre(r, &v))
 	if r1 != nil {
-		p1 = r1.Prog
+		p1 := r1.Prog
 		if p1.To.Type == obj.TYPE_REG && p1.To.Reg == v.Reg {
 			switch p1.As {
 			case arm.AADD:

@@ -1030,13 +996,14 @@ func xtramodes(g *gc.Graph, r *gc.Flow, a *obj.Addr) bool {
 		case arm.AMOVW:
 			if p1.From.Type == obj.TYPE_REG {
-				r2 = findinc(r1, r, &p1.From)
+				r2 := (*gc.Flow)(findinc(r1, r, &p1.From))
 				if r2 != nil {
+					var r3 *gc.Flow
 					for r3 = gc.Uniqs(r2); r3.Prog.As == obj.ANOP; r3 = gc.Uniqs(r3) {
 					}
 					if r3 == r {
 						/* post-indexing */
-						p1 = r2.Prog
+						p1 := r2.Prog
 						a.Reg = p1.To.Reg
 						a.Offset = p1.From.Offset

@@ -1054,10 +1021,10 @@ func xtramodes(g *gc.Graph, r *gc.Flow, a *obj.Addr) bool {
 	}
 	if a != &p.From || a.Reg != p.To.Reg {
-		r1 = findinc(r, nil, &v)
+		r1 := (*gc.Flow)(findinc(r, nil, &v))
 		if r1 != nil {
 			/* post-indexing */
-			p1 = r1.Prog
+			p1 := r1.Prog
 			a.Offset = p1.From.Offset
 			p.Scond |= arm.C_PBIT

@@ -1775,19 +1742,17 @@ func successor(r *gc.Flow) *gc.Flow {
 }
 func applypred(rstart *gc.Flow, j *Joininfo, cond int, branch int) {
-	var pred int
-	var r *gc.Flow
 	if j.len == 0 {
 		return
 	}
+	var pred int
 	if cond == Truecond {
 		pred = predinfo[rstart.Prog.As-arm.ABEQ].scond
 	} else {
 		pred = predinfo[rstart.Prog.As-arm.ABEQ].notscond
 	}
-	for r = j.start; ; r = successor(r) {
+	for r := (*gc.Flow)(j.start); ; r = successor(r) {
 		if r.Prog.As == arm.AB {
 			if r != j.last || branch == Delbranch {
 				excise(r)

@@ -1813,13 +1778,12 @@ func applypred(rstart *gc.Flow, j *Joininfo, cond int, branch int) {
 }
 func predicate(g *gc.Graph) {
-	var r *gc.Flow
 	var t1 int
 	var t2 int
 	var j1 Joininfo
 	var j2 Joininfo
-	for r = g.Start; r != nil; r = r.Link {
+	for r := (*gc.Flow)(g.Start); r != nil; r = r.Link {
 		if isbranch(r.Prog) {
 			t1 = joinsplit(r.S1, &j1)
 			t2 = joinsplit(r.S2, &j2)

@@ -1861,8 +1825,6 @@ func smallindir(a *obj.Addr, reg *obj.Addr) bool {
 }
 func excise(r *gc.Flow) {
-	var p *obj.Prog
-	p = r.Prog
+	p := (*obj.Prog)(r.Prog)
 	obj.Nopout(p)
 }