Commit f94e0745 authored by Keith Randall

[dev.ssa] cmd/compile: prepare for some load+op combining

Rename StoreConst to ValAndOff so we can use it for other ops.
Make ValAndOff print nicely.

Add some notes & checks related to my aborted attempt to
implement combined CMP+load ops.

Change-Id: I2f901d12d42bc5a82879af0334806aa184a97e27
Reviewed-on: https://go-review.googlesource.com/18947
Run-TryBot: David Chase <drchase@google.com>
Reviewed-by: David Chase <drchase@google.com>
parent 5ba31940
src/cmd/compile/internal/gc/ssa.go

@@ -4092,7 +4092,7 @@ func (s *genState) genValue(v *ssa.Value) {
 	case ssa.OpAMD64MOVQstoreconst, ssa.OpAMD64MOVLstoreconst, ssa.OpAMD64MOVWstoreconst, ssa.OpAMD64MOVBstoreconst:
 		p := Prog(v.Op.Asm())
 		p.From.Type = obj.TYPE_CONST
-		sc := ssa.StoreConst(v.AuxInt)
+		sc := ssa.ValAndOff(v.AuxInt)
 		i := sc.Val()
 		switch v.Op {
 		case ssa.OpAMD64MOVBstoreconst:
@@ -4372,7 +4372,7 @@ func (s *genState) genValue(v *ssa.Value) {
 			return
 		}
 	case ssa.OpAMD64MOVQstoreconst, ssa.OpAMD64MOVLstoreconst, ssa.OpAMD64MOVWstoreconst, ssa.OpAMD64MOVBstoreconst:
-		off := ssa.StoreConst(v.AuxInt).Off()
+		off := ssa.ValAndOff(v.AuxInt).Off()
 		if w.Args[0] == v.Args[0] && w.Aux == nil && off >= 0 && off < minZeroPage {
 			if Debug_checknil != 0 && int(v.Line) > 1 {
 				Warnl(int(v.Line), "removed nil check")
src/cmd/compile/internal/ssa/TODO

@@ -20,7 +20,6 @@ Optimizations (better compiled code)
 - Expand current optimizations to all bit widths
 - Add a value range propagation pass (for bounds elim & bitwidth reduction)
 - Make dead store pass inter-block
-- (x86) Combine loads into other ops
 - (x86) More combining address arithmetic into loads/stores
 - (x86) use ADDQ instead of LEAQ when we can
 - redundant CMP in sequences like this:
@@ -38,8 +37,6 @@ Optimizations (better compiled code)
   Same for interfaces?
 - boolean logic: movb/xorb$1/testb/jeq -> movb/testb/jne
 - (ADDQconst (SUBQconst x)) and vice-versa
-- (CMP (Load ...)) and (CMPconst (Load ...)) in one instruction
-  (all instructions, really)
 - combine LEAQs
 - store followed by load to same address
 - (CMPconst [0] (AND x y)) -> (TEST x y)
@@ -50,6 +47,10 @@ Optimizations (better compiled code)
 - better computing of &&/|| in non-if/for contexts
 - OpArrayIndex should take its index in AuxInt, not a full value.
 - remove FLAGS from REP instruction clobbers
+- (x86) Combine loads into other ops
+  Note that this is challenging for ops that generate flags
+  because flagalloc wants to move those instructions around for
+  flag regeneration.
 
 Optimizations (better compiler)
 -------------------------------
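The caveat in the new TODO entry is worth unpacking: flagalloc assumes it can regenerate a clobbered flags value anywhere by re-issuing its defining instruction. A toy Go model of the hazard (every name here is invented for this sketch) shows why that is safe for a register-only compare but not for a combined load+compare:

package main

import "fmt"

func main() {
	mem := []int64{42}
	x := mem[0]

	// A register-only compare: its input lives in a register, so
	// re-issuing it later (to regenerate flags) recomputes the
	// same answer.
	cmpReg := func() bool { return x == 42 }

	// A combined load+compare re-reads memory each time it is
	// issued, so re-issuing it after an intervening store gives a
	// different answer.
	cmpLoad := func() bool { return mem[0] == 42 }

	fmt.Println(cmpReg(), cmpLoad()) // true true

	mem[0] = 0 // an intervening store to the loaded address

	// flagalloc-style "regeneration": run the defining op again.
	fmt.Println(cmpReg(), cmpLoad()) // true false
}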
src/cmd/compile/internal/ssa/flagalloc.go

@@ -42,11 +42,14 @@ func flagalloc(f *Func) {
 					}
 				}
 			}
-			for _, p := range b.Preds {
-				end[p.ID] = flag
+			if flag != nil {
+				for _, p := range b.Preds {
+					end[p.ID] = flag
+				}
 			}
 		}
 	}
 	// For blocks which have a flags control value, that's the only value
 	// we can leave in the flags register at the end of the block. (There
 	// is no place to put a flag regeneration instruction.)
src/cmd/compile/internal/ssa/gen/AMD64.rules

@@ -556,24 +556,24 @@
 (MOVOstore [off1] {sym} (ADDQconst [off2] ptr) val mem) -> (MOVOstore [addOff(off1, off2)] {sym} ptr val mem)
 
 // Fold constants into stores.
-(MOVQstore [off] {sym} ptr (MOVQconst [c]) mem) && validStoreConst(c,off) ->
-	(MOVQstoreconst [makeStoreConst(c,off)] {sym} ptr mem)
-(MOVLstore [off] {sym} ptr (MOVLconst [c]) mem) && validStoreConstOff(off) ->
-	(MOVLstoreconst [makeStoreConst(int64(int32(c)),off)] {sym} ptr mem)
-(MOVWstore [off] {sym} ptr (MOVWconst [c]) mem) && validStoreConstOff(off) ->
-	(MOVWstoreconst [makeStoreConst(int64(int16(c)),off)] {sym} ptr mem)
-(MOVBstore [off] {sym} ptr (MOVBconst [c]) mem) && validStoreConstOff(off) ->
-	(MOVBstoreconst [makeStoreConst(int64(int8(c)),off)] {sym} ptr mem)
+(MOVQstore [off] {sym} ptr (MOVQconst [c]) mem) && validValAndOff(c,off) ->
+	(MOVQstoreconst [makeValAndOff(c,off)] {sym} ptr mem)
+(MOVLstore [off] {sym} ptr (MOVLconst [c]) mem) && validOff(off) ->
+	(MOVLstoreconst [makeValAndOff(int64(int32(c)),off)] {sym} ptr mem)
+(MOVWstore [off] {sym} ptr (MOVWconst [c]) mem) && validOff(off) ->
+	(MOVWstoreconst [makeValAndOff(int64(int16(c)),off)] {sym} ptr mem)
+(MOVBstore [off] {sym} ptr (MOVBconst [c]) mem) && validOff(off) ->
+	(MOVBstoreconst [makeValAndOff(int64(int8(c)),off)] {sym} ptr mem)
 
 // Fold address offsets into constant stores.
-(MOVQstoreconst [sc] {s} (ADDQconst [off] ptr) mem) && StoreConst(sc).canAdd(off) ->
-	(MOVQstoreconst [StoreConst(sc).add(off)] {s} ptr mem)
-(MOVLstoreconst [sc] {s} (ADDQconst [off] ptr) mem) && StoreConst(sc).canAdd(off) ->
-	(MOVLstoreconst [StoreConst(sc).add(off)] {s} ptr mem)
-(MOVWstoreconst [sc] {s} (ADDQconst [off] ptr) mem) && StoreConst(sc).canAdd(off) ->
-	(MOVWstoreconst [StoreConst(sc).add(off)] {s} ptr mem)
-(MOVBstoreconst [sc] {s} (ADDQconst [off] ptr) mem) && StoreConst(sc).canAdd(off) ->
-	(MOVBstoreconst [StoreConst(sc).add(off)] {s} ptr mem)
+(MOVQstoreconst [sc] {s} (ADDQconst [off] ptr) mem) && ValAndOff(sc).canAdd(off) ->
+	(MOVQstoreconst [ValAndOff(sc).add(off)] {s} ptr mem)
+(MOVLstoreconst [sc] {s} (ADDQconst [off] ptr) mem) && ValAndOff(sc).canAdd(off) ->
+	(MOVLstoreconst [ValAndOff(sc).add(off)] {s} ptr mem)
+(MOVWstoreconst [sc] {s} (ADDQconst [off] ptr) mem) && ValAndOff(sc).canAdd(off) ->
+	(MOVWstoreconst [ValAndOff(sc).add(off)] {s} ptr mem)
+(MOVBstoreconst [sc] {s} (ADDQconst [off] ptr) mem) && ValAndOff(sc).canAdd(off) ->
+	(MOVBstoreconst [ValAndOff(sc).add(off)] {s} ptr mem)
 
 // We need to fold LEAQ into the MOVx ops so that the live variable analysis knows
 // what variables are being read/written by the ops.
@@ -607,14 +607,14 @@
 (MOVOstore [off1] {sym1} (LEAQ [off2] {sym2} base) val mem) && canMergeSym(sym1, sym2) ->
 	(MOVOstore [addOff(off1,off2)] {mergeSym(sym1,sym2)} base val mem)
 
-(MOVQstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem) && canMergeSym(sym1, sym2) && StoreConst(sc).canAdd(off) ->
-	(MOVQstoreconst [StoreConst(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem)
-(MOVLstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem) && canMergeSym(sym1, sym2) && StoreConst(sc).canAdd(off) ->
-	(MOVLstoreconst [StoreConst(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem)
-(MOVWstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem) && canMergeSym(sym1, sym2) && StoreConst(sc).canAdd(off) ->
-	(MOVWstoreconst [StoreConst(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem)
-(MOVBstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem) && canMergeSym(sym1, sym2) && StoreConst(sc).canAdd(off) ->
-	(MOVBstoreconst [StoreConst(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem)
+(MOVQstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem) && canMergeSym(sym1, sym2) && ValAndOff(sc).canAdd(off) ->
+	(MOVQstoreconst [ValAndOff(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem)
+(MOVLstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem) && canMergeSym(sym1, sym2) && ValAndOff(sc).canAdd(off) ->
+	(MOVLstoreconst [ValAndOff(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem)
+(MOVWstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem) && canMergeSym(sym1, sym2) && ValAndOff(sc).canAdd(off) ->
+	(MOVWstoreconst [ValAndOff(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem)
+(MOVBstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem) && canMergeSym(sym1, sym2) && ValAndOff(sc).canAdd(off) ->
+	(MOVBstoreconst [ValAndOff(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem)
 
 // indexed loads and stores
 (MOVQloadidx8 [off1] {sym} (ADDQconst [off2] ptr) idx mem) -> (MOVQloadidx8 [addOff(off1, off2)] {sym} ptr idx mem)
@@ -647,16 +647,16 @@
 (Zero [8] destptr mem) -> (MOVQstoreconst [0] destptr mem)
 
 (Zero [3] destptr mem) ->
-	(MOVBstoreconst [makeStoreConst(0,2)] destptr
+	(MOVBstoreconst [makeValAndOff(0,2)] destptr
 		(MOVWstoreconst [0] destptr mem))
 (Zero [5] destptr mem) ->
-	(MOVBstoreconst [makeStoreConst(0,4)] destptr
+	(MOVBstoreconst [makeValAndOff(0,4)] destptr
 		(MOVLstoreconst [0] destptr mem))
 (Zero [6] destptr mem) ->
-	(MOVWstoreconst [makeStoreConst(0,4)] destptr
+	(MOVWstoreconst [makeValAndOff(0,4)] destptr
 		(MOVLstoreconst [0] destptr mem))
 (Zero [7] destptr mem) ->
-	(MOVLstoreconst [makeStoreConst(0,3)] destptr
+	(MOVLstoreconst [makeValAndOff(0,3)] destptr
 		(MOVLstoreconst [0] destptr mem))
 
 // Strip off any fractional word zeroing.
@@ -666,16 +666,16 @@
 
 // Zero small numbers of words directly.
 (Zero [16] destptr mem) ->
-	(MOVQstoreconst [makeStoreConst(0,8)] destptr
+	(MOVQstoreconst [makeValAndOff(0,8)] destptr
 		(MOVQstoreconst [0] destptr mem))
 (Zero [24] destptr mem) ->
-	(MOVQstoreconst [makeStoreConst(0,16)] destptr
-		(MOVQstoreconst [makeStoreConst(0,8)] destptr
+	(MOVQstoreconst [makeValAndOff(0,16)] destptr
+		(MOVQstoreconst [makeValAndOff(0,8)] destptr
 			(MOVQstoreconst [0] destptr mem)))
 (Zero [32] destptr mem) ->
-	(MOVQstoreconst [makeStoreConst(0,24)] destptr
-		(MOVQstoreconst [makeStoreConst(0,16)] destptr
-			(MOVQstoreconst [makeStoreConst(0,8)] destptr
+	(MOVQstoreconst [makeValAndOff(0,24)] destptr
+		(MOVQstoreconst [makeValAndOff(0,16)] destptr
+			(MOVQstoreconst [makeValAndOff(0,8)] destptr
 				(MOVQstoreconst [0] destptr mem))))
 
 // Medium zeroing uses a duff device.
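For orientation, these Zero rules fire on ordinary Go code that clears small fixed-size regions. A hypothetical example of the kind of source they target; whether it compiles to exactly this instruction sequence depends on the compiler version and later passes:

package main

import "fmt"

// clear24 zeroes a 24-byte region. Zero [24] is the SSA op the rules
// above rewrite into a chain of three MOVQstoreconst ops, storing a
// zero quadword at offsets 16, 8, and 0. (A sketch of the intent,
// not a guaranteed codegen contract.)
func clear24(p *[24]byte) {
	*p = [24]byte{}
}

func main() {
	var buf [24]byte
	buf[5] = 7
	clear24(&buf)
	fmt.Println(buf) // all zeros
}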
src/cmd/compile/internal/ssa/gen/AMD64Ops.go

@@ -382,8 +382,8 @@ func init() {
 
 		// For storeconst ops, the AuxInt field encodes both
 		// the value to store and an address offset of the store.
-		// Cast AuxInt to a StoreConst to extract Val and Off fields.
-		{name: "MOVBstoreconst", reg: gpstoreconst, asm: "MOVB", typ: "Mem"}, // store low byte of StoreConst(AuxInt).Val() to arg0+StoreConst(AuxInt).Off()+aux. arg1=mem
+		// Cast AuxInt to a ValAndOff to extract Val and Off fields.
+		{name: "MOVBstoreconst", reg: gpstoreconst, asm: "MOVB", typ: "Mem"}, // store low byte of ValAndOff(AuxInt).Val() to arg0+ValAndOff(AuxInt).Off()+aux. arg1=mem
 		{name: "MOVWstoreconst", reg: gpstoreconst, asm: "MOVW", typ: "Mem"}, // store low 2 bytes of ...
 		{name: "MOVLstoreconst", reg: gpstoreconst, asm: "MOVL", typ: "Mem"}, // store low 4 bytes of ...
 		{name: "MOVQstoreconst", reg: gpstoreconst, asm: "MOVQ", typ: "Mem"}, // store 8 bytes of ...
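That comment is the entire contract between the opcode table and its consumers: the code generator casts AuxInt to a ValAndOff and splits it, exactly as the ssa.go hunk above does. A minimal standalone sketch of the decoding, with the type re-declared locally so the snippet runs on its own (the real one lives in package ssa):

package main

import "fmt"

// ValAndOff mirrors ssa.ValAndOff: value in the high 32 bits,
// sign-extended pointer offset in the low 32 bits.
type ValAndOff int64

func (x ValAndOff) Val() int64 { return int64(x) >> 32 }
func (x ValAndOff) Off() int64 { return int64(int32(x)) }

func main() {
	// An AuxInt as makeValAndOff(0x12, 8) would build it.
	auxInt := int64(0x12)<<32 | 8

	// What genValue does for MOVxstoreconst: split AuxInt into the
	// immediate to store and the addressing offset.
	sc := ValAndOff(auxInt)
	fmt.Println(sc.Val(), sc.Off()) // 18 8
}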
src/cmd/compile/internal/ssa/op.go

@@ -4,6 +4,8 @@
 package ssa
 
+import "fmt"
+
 // An Op encodes the specific operation that a Value performs.
 // Opcodes' semantics can be modified by the type and aux fields of the Value.
 // For instance, OpAdd can be 32 or 64 bit, signed or unsigned, float or complex, depending on Value.Type.
@@ -30,57 +32,67 @@ type regInfo struct {
 	outputs []regMask // NOTE: values can only have 1 output for now.
 }
 
-// A StoreConst is used by the MOVXstoreconst opcodes. It holds
-// both the value to store and an offset from the store pointer.
-// A StoreConst is intended to be encoded into an AuxInt field.
-// The zero StoreConst encodes a value of 0 and an offset of 0.
-// The high 32 bits hold a value to be stored.
+// A ValAndOff is used by several opcodes. It holds
+// both a value and a pointer offset.
+// A ValAndOff is intended to be encoded into an AuxInt field.
+// The zero ValAndOff encodes a value of 0 and an offset of 0.
+// The high 32 bits hold a value.
 // The low 32 bits hold a pointer offset.
-type StoreConst int64
+type ValAndOff int64
 
-func (sc StoreConst) Val() int64 {
-	return int64(sc) >> 32
+func (x ValAndOff) Val() int64 {
+	return int64(x) >> 32
 }
 
-func (sc StoreConst) Off() int64 {
-	return int64(int32(sc))
+func (x ValAndOff) Off() int64 {
+	return int64(int32(x))
 }
 
-func (sc StoreConst) Int64() int64 {
-	return int64(sc)
+func (x ValAndOff) Int64() int64 {
+	return int64(x)
+}
+
+func (x ValAndOff) String() string {
+	return fmt.Sprintf("val=%d,off=%d", x.Val(), x.Off())
 }
 
-// validStoreConstOff reports whether the offset can be used
-// as an argument to makeStoreConst.
-func validStoreConstOff(off int64) bool {
+// validVal reports whether the value can be used
+// as an argument to makeValAndOff.
+func validVal(val int64) bool {
+	return val == int64(int32(val))
+}
+
+// validOff reports whether the offset can be used
+// as an argument to makeValAndOff.
+func validOff(off int64) bool {
 	return off == int64(int32(off))
 }
 
-// validStoreConst reports whether we can fit the value and offset into
-// a StoreConst value.
-func validStoreConst(val, off int64) bool {
-	if val != int64(int32(val)) {
+// validValAndOff reports whether we can fit the value and offset into
+// a ValAndOff value.
+func validValAndOff(val, off int64) bool {
+	if !validVal(val) {
 		return false
 	}
-	if !validStoreConstOff(off) {
+	if !validOff(off) {
 		return false
 	}
 	return true
 }
 
-// encode encodes a StoreConst into an int64 suitable for storing in an AuxInt field.
-func makeStoreConst(val, off int64) int64 {
-	if !validStoreConst(val, off) {
-		panic("invalid makeStoreConst")
+// makeValAndOff encodes a ValAndOff into an int64 suitable for storing in an AuxInt field.
+func makeValAndOff(val, off int64) int64 {
+	if !validValAndOff(val, off) {
+		panic("invalid makeValAndOff")
 	}
-	return StoreConst(val<<32 + int64(uint32(off))).Int64()
+	return ValAndOff(val<<32 + int64(uint32(off))).Int64()
 }
 
-func (sc StoreConst) canAdd(off int64) bool {
-	newoff := sc.Off() + off
+func (x ValAndOff) canAdd(off int64) bool {
+	newoff := x.Off() + off
 	return newoff == int64(int32(newoff))
 }
 
-func (sc StoreConst) add(off int64) int64 {
-	if !sc.canAdd(off) {
-		panic("invalid StoreConst.add")
+func (x ValAndOff) add(off int64) int64 {
+	if !x.canAdd(off) {
+		panic("invalid ValAndOff.add")
 	}
-	return makeStoreConst(sc.Val(), sc.Off()+off)
+	return makeValAndOff(x.Val(), x.Off()+off)
 }
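Two details of this encoding are easy to miss: the offset is narrowed through uint32 when packed, so a negative offset does not disturb the value half, and it is recovered through int32, which restores the sign. A standalone sketch demonstrating the round trip and the offset folding done by the canAdd/add rules above (simplified local copies of the patch's unexported helpers, validation omitted):

package main

import "fmt"

type ValAndOff int64

func (x ValAndOff) Val() int64 { return int64(x) >> 32 }
func (x ValAndOff) Off() int64 { return int64(int32(x)) }

// makeValAndOff mirrors the patch's encoder: masking the offset to its
// low 32 bits keeps a negative offset from corrupting the value half.
func makeValAndOff(val, off int64) int64 {
	return int64(ValAndOff(val<<32 + int64(uint32(off))))
}

func main() {
	x := ValAndOff(makeValAndOff(-1, -4))
	fmt.Println(x.Val(), x.Off()) // -1 -4: both halves round-trip with sign

	// Folding an ADDQconst offset into the encoding, as the
	// (MOVxstoreconst ... (ADDQconst ...)) rewrite rules do:
	y := ValAndOff(makeValAndOff(x.Val(), x.Off()+12))
	fmt.Println(y.Val(), y.Off()) // -1 8
}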
src/cmd/compile/internal/ssa/value.go

@@ -61,16 +61,22 @@ func (v *Value) String() string {
 func (v *Value) LongString() string {
 	s := fmt.Sprintf("v%d = %s", v.ID, v.Op.String())
 	s += " <" + v.Type.String() + ">"
-	if v.AuxInt != 0 {
-		s += fmt.Sprintf(" [%d]", v.AuxInt)
-	}
-	switch {
-	case v.Op == OpConst32F || v.Op == OpConst64F:
-		s += fmt.Sprintf("(%g)", math.Float64frombits(uint64(v.AuxInt)))
-	case v.Op == OpConstBool && v.AuxInt == 0:
-		s += " (false)"
-	case v.Op == OpConstBool && v.AuxInt == 1:
-		s += " (true)"
+	// TODO: use some operator property flags to decide
+	// what is encoded in the AuxInt field.
+	switch v.Op {
+	case OpConst32F, OpConst64F:
+		s += fmt.Sprintf(" [%g]", math.Float64frombits(uint64(v.AuxInt)))
+	case OpConstBool:
+		if v.AuxInt == 0 {
+			s += " [false]"
+		} else {
+			s += " [true]"
+		}
+	case OpAMD64MOVBstoreconst, OpAMD64MOVWstoreconst, OpAMD64MOVLstoreconst, OpAMD64MOVQstoreconst:
+		s += fmt.Sprintf(" [%s]", ValAndOff(v.AuxInt))
+	default:
+		if v.AuxInt != 0 {
+			s += fmt.Sprintf(" [%d]", v.AuxInt)
+		}
 	}
 	if v.Aux != nil {
@@ -132,6 +138,11 @@ func (v *Value) copyInto(b *Block) *Value {
 	c.Aux = v.Aux
 	c.AuxInt = v.AuxInt
 	c.AddArgs(v.Args...)
+	for _, a := range v.Args {
+		if a.Type.IsMemory() {
+			v.Fatalf("can't move a value with a memory arg %s", v.LongString())
+		}
+	}
 	return c
 }
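The LongString change above is what the commit message means by "Make ValAndOff print nicely": ValAndOff now implements fmt.Stringer, so the %s verb formats the packed AuxInt readably instead of as a raw int64. A tiny self-contained illustration of that mechanism (local copy of the type):

package main

import "fmt"

type ValAndOff int64

func (x ValAndOff) Val() int64 { return int64(x) >> 32 }
func (x ValAndOff) Off() int64 { return int64(int32(x)) }

// String makes ValAndOff a fmt.Stringer, so %s (and %v) pick up this
// rendering automatically, as LongString relies on above.
func (x ValAndOff) String() string {
	return fmt.Sprintf("val=%d,off=%d", x.Val(), x.Off())
}

func main() {
	auxInt := int64(0)<<32 | 16 // as makeValAndOff(0, 16) would encode
	fmt.Printf(" [%s]\n", ValAndOff(auxInt)) // [val=0,off=16]
}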