Commit f94e0745 authored by Keith Randall

[dev.ssa] cmd/compile: prepare for some load+op combining

Rename StoreConst to ValAndOff so we can use it for other ops.
Make ValAndOff print nicely.

Add some notes & checks related to my aborted attempt to
implement combined CMP+load ops.

Change-Id: I2f901d12d42bc5a82879af0334806aa184a97e27
Reviewed-on: https://go-review.googlesource.com/18947
Run-TryBot: David Chase <drchase@google.com>
Reviewed-by: David Chase <drchase@google.com>
parent 5ba31940
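For context on the "combined CMP+load ops" the message refers to: the idea is to fold a memory load directly into the comparison that uses it. Below is a minimal sketch of the kind of Go code such a combine targets; the function name is illustrative only, and the combined ops themselves are not part of this CL (see the TODO note in the diff).

package main

// cmpMem is the sort of function a CMP+load combine would improve: on this
// branch the load of *p and the comparison against c are emitted as separate
// instructions (a MOVQ load followed by a CMPQ); a combined op could compare
// against the memory operand directly.
func cmpMem(p *int64, c int64) bool {
	return *p == c
}

func main() {
	x := int64(42)
	println(cmpMem(&x, 42))
}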
@@ -4092,7 +4092,7 @@ func (s *genState) genValue(v *ssa.Value) {
 	case ssa.OpAMD64MOVQstoreconst, ssa.OpAMD64MOVLstoreconst, ssa.OpAMD64MOVWstoreconst, ssa.OpAMD64MOVBstoreconst:
 		p := Prog(v.Op.Asm())
 		p.From.Type = obj.TYPE_CONST
-		sc := ssa.StoreConst(v.AuxInt)
+		sc := ssa.ValAndOff(v.AuxInt)
 		i := sc.Val()
 		switch v.Op {
 		case ssa.OpAMD64MOVBstoreconst:
@@ -4372,7 +4372,7 @@ func (s *genState) genValue(v *ssa.Value) {
 				return
 			}
 		case ssa.OpAMD64MOVQstoreconst, ssa.OpAMD64MOVLstoreconst, ssa.OpAMD64MOVWstoreconst, ssa.OpAMD64MOVBstoreconst:
-			off := ssa.StoreConst(v.AuxInt).Off()
+			off := ssa.ValAndOff(v.AuxInt).Off()
 			if w.Args[0] == v.Args[0] && w.Aux == nil && off >= 0 && off < minZeroPage {
 				if Debug_checknil != 0 && int(v.Line) > 1 {
 					Warnl(int(v.Line), "removed nil check")
...
@@ -20,7 +20,6 @@ Optimizations (better compiled code)
 - Expand current optimizations to all bit widths
 - Add a value range propagation pass (for bounds elim & bitwidth reduction)
 - Make dead store pass inter-block
-- (x86) Combine loads into other ops
 - (x86) More combining address arithmetic into loads/stores
 - (x86) use ADDQ instead of LEAQ when we can
 - redundant CMP in sequences like this:
@@ -38,8 +37,6 @@ Optimizations (better compiled code)
   Same for interfaces?
 - boolean logic: movb/xorb$1/testb/jeq -> movb/testb/jne
 - (ADDQconst (SUBQconst x)) and vice-versa
-- (CMP (Load ...)) and (CMPconst (Load ...)) in one instruction
-  (all instructions, really)
 - combine LEAQs
 - store followed by load to same address
 - (CMPconst [0] (AND x y)) -> (TEST x y)
@@ -50,6 +47,10 @@ Optimizations (better compiled code)
 - better computing of &&/|| in non-if/for contexts
 - OpArrayIndex should take its index in AuxInt, not a full value.
 - remove FLAGS from REP instruction clobbers
+- (x86) Combine loads into other ops
+  Note that this is challenging for ops that generate flags
+  because flagalloc wants to move those instructions around for
+  flag regeneration.

 Optimizations (better compiler)
 -------------------------------
...
@@ -42,11 +42,14 @@ func flagalloc(f *Func) {
 			}
 		}
 	}
-	for _, p := range b.Preds {
-		end[p.ID] = flag
+	if flag != nil {
+		for _, p := range b.Preds {
+			end[p.ID] = flag
+		}
 	}
 	}
 	}
 	// For blocks which have a flags control value, that's the only value
 	// we can leave in the flags register at the end of the block. (There
 	// is no place to put a flag regeneration instruction.)
...
@@ -556,24 +556,24 @@
 (MOVOstore [off1] {sym} (ADDQconst [off2] ptr) val mem) -> (MOVOstore [addOff(off1, off2)] {sym} ptr val mem)

 // Fold constants into stores.
-(MOVQstore [off] {sym} ptr (MOVQconst [c]) mem) && validStoreConst(c,off) ->
-	(MOVQstoreconst [makeStoreConst(c,off)] {sym} ptr mem)
-(MOVLstore [off] {sym} ptr (MOVLconst [c]) mem) && validStoreConstOff(off) ->
-	(MOVLstoreconst [makeStoreConst(int64(int32(c)),off)] {sym} ptr mem)
-(MOVWstore [off] {sym} ptr (MOVWconst [c]) mem) && validStoreConstOff(off) ->
-	(MOVWstoreconst [makeStoreConst(int64(int16(c)),off)] {sym} ptr mem)
-(MOVBstore [off] {sym} ptr (MOVBconst [c]) mem) && validStoreConstOff(off) ->
-	(MOVBstoreconst [makeStoreConst(int64(int8(c)),off)] {sym} ptr mem)
+(MOVQstore [off] {sym} ptr (MOVQconst [c]) mem) && validValAndOff(c,off) ->
+	(MOVQstoreconst [makeValAndOff(c,off)] {sym} ptr mem)
+(MOVLstore [off] {sym} ptr (MOVLconst [c]) mem) && validOff(off) ->
+	(MOVLstoreconst [makeValAndOff(int64(int32(c)),off)] {sym} ptr mem)
+(MOVWstore [off] {sym} ptr (MOVWconst [c]) mem) && validOff(off) ->
+	(MOVWstoreconst [makeValAndOff(int64(int16(c)),off)] {sym} ptr mem)
+(MOVBstore [off] {sym} ptr (MOVBconst [c]) mem) && validOff(off) ->
+	(MOVBstoreconst [makeValAndOff(int64(int8(c)),off)] {sym} ptr mem)

 // Fold address offsets into constant stores.
-(MOVQstoreconst [sc] {s} (ADDQconst [off] ptr) mem) && StoreConst(sc).canAdd(off) ->
-	(MOVQstoreconst [StoreConst(sc).add(off)] {s} ptr mem)
-(MOVLstoreconst [sc] {s} (ADDQconst [off] ptr) mem) && StoreConst(sc).canAdd(off) ->
-	(MOVLstoreconst [StoreConst(sc).add(off)] {s} ptr mem)
-(MOVWstoreconst [sc] {s} (ADDQconst [off] ptr) mem) && StoreConst(sc).canAdd(off) ->
-	(MOVWstoreconst [StoreConst(sc).add(off)] {s} ptr mem)
-(MOVBstoreconst [sc] {s} (ADDQconst [off] ptr) mem) && StoreConst(sc).canAdd(off) ->
-	(MOVBstoreconst [StoreConst(sc).add(off)] {s} ptr mem)
+(MOVQstoreconst [sc] {s} (ADDQconst [off] ptr) mem) && ValAndOff(sc).canAdd(off) ->
+	(MOVQstoreconst [ValAndOff(sc).add(off)] {s} ptr mem)
+(MOVLstoreconst [sc] {s} (ADDQconst [off] ptr) mem) && ValAndOff(sc).canAdd(off) ->
+	(MOVLstoreconst [ValAndOff(sc).add(off)] {s} ptr mem)
+(MOVWstoreconst [sc] {s} (ADDQconst [off] ptr) mem) && ValAndOff(sc).canAdd(off) ->
+	(MOVWstoreconst [ValAndOff(sc).add(off)] {s} ptr mem)
+(MOVBstoreconst [sc] {s} (ADDQconst [off] ptr) mem) && ValAndOff(sc).canAdd(off) ->
+	(MOVBstoreconst [ValAndOff(sc).add(off)] {s} ptr mem)

 // We need to fold LEAQ into the MOVx ops so that the live variable analysis knows
 // what variables are being read/written by the ops.
@@ -607,14 +607,14 @@
 (MOVOstore [off1] {sym1} (LEAQ [off2] {sym2} base) val mem) && canMergeSym(sym1, sym2) ->
 	(MOVOstore [addOff(off1,off2)] {mergeSym(sym1,sym2)} base val mem)
-(MOVQstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem) && canMergeSym(sym1, sym2) && StoreConst(sc).canAdd(off) ->
-	(MOVQstoreconst [StoreConst(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem)
-(MOVLstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem) && canMergeSym(sym1, sym2) && StoreConst(sc).canAdd(off) ->
-	(MOVLstoreconst [StoreConst(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem)
-(MOVWstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem) && canMergeSym(sym1, sym2) && StoreConst(sc).canAdd(off) ->
-	(MOVWstoreconst [StoreConst(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem)
-(MOVBstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem) && canMergeSym(sym1, sym2) && StoreConst(sc).canAdd(off) ->
-	(MOVBstoreconst [StoreConst(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem)
+(MOVQstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem) && canMergeSym(sym1, sym2) && ValAndOff(sc).canAdd(off) ->
+	(MOVQstoreconst [ValAndOff(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem)
+(MOVLstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem) && canMergeSym(sym1, sym2) && ValAndOff(sc).canAdd(off) ->
+	(MOVLstoreconst [ValAndOff(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem)
+(MOVWstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem) && canMergeSym(sym1, sym2) && ValAndOff(sc).canAdd(off) ->
+	(MOVWstoreconst [ValAndOff(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem)
+(MOVBstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem) && canMergeSym(sym1, sym2) && ValAndOff(sc).canAdd(off) ->
+	(MOVBstoreconst [ValAndOff(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem)

 // indexed loads and stores
 (MOVQloadidx8 [off1] {sym} (ADDQconst [off2] ptr) idx mem) -> (MOVQloadidx8 [addOff(off1, off2)] {sym} ptr idx mem)
@@ -647,16 +647,16 @@
 (Zero [8] destptr mem) -> (MOVQstoreconst [0] destptr mem)

 (Zero [3] destptr mem) ->
-	(MOVBstoreconst [makeStoreConst(0,2)] destptr
+	(MOVBstoreconst [makeValAndOff(0,2)] destptr
 		(MOVWstoreconst [0] destptr mem))
 (Zero [5] destptr mem) ->
-	(MOVBstoreconst [makeStoreConst(0,4)] destptr
+	(MOVBstoreconst [makeValAndOff(0,4)] destptr
 		(MOVLstoreconst [0] destptr mem))
 (Zero [6] destptr mem) ->
-	(MOVWstoreconst [makeStoreConst(0,4)] destptr
+	(MOVWstoreconst [makeValAndOff(0,4)] destptr
 		(MOVLstoreconst [0] destptr mem))
 (Zero [7] destptr mem) ->
-	(MOVLstoreconst [makeStoreConst(0,3)] destptr
+	(MOVLstoreconst [makeValAndOff(0,3)] destptr
 		(MOVLstoreconst [0] destptr mem))

 // Strip off any fractional word zeroing.
@@ -666,16 +666,16 @@
 // Zero small numbers of words directly.
 (Zero [16] destptr mem) ->
-	(MOVQstoreconst [makeStoreConst(0,8)] destptr
+	(MOVQstoreconst [makeValAndOff(0,8)] destptr
 		(MOVQstoreconst [0] destptr mem))
 (Zero [24] destptr mem) ->
-	(MOVQstoreconst [makeStoreConst(0,16)] destptr
-		(MOVQstoreconst [makeStoreConst(0,8)] destptr
+	(MOVQstoreconst [makeValAndOff(0,16)] destptr
+		(MOVQstoreconst [makeValAndOff(0,8)] destptr
 			(MOVQstoreconst [0] destptr mem)))
 (Zero [32] destptr mem) ->
-	(MOVQstoreconst [makeStoreConst(0,24)] destptr
-		(MOVQstoreconst [makeStoreConst(0,16)] destptr
-			(MOVQstoreconst [makeStoreConst(0,8)] destptr
+	(MOVQstoreconst [makeValAndOff(0,24)] destptr
+		(MOVQstoreconst [makeValAndOff(0,16)] destptr
+			(MOVQstoreconst [makeValAndOff(0,8)] destptr
 				(MOVQstoreconst [0] destptr mem))))

 // Medium zeroing uses a duff device.
...
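As a concrete illustration of the Zero rules above: per the (Zero [24] ...) rule, zeroing a 24-byte value should lower to three MOVQstoreconst ops whose AuxInts are makeValAndOff(0,16), makeValAndOff(0,8), and 0. The small Go program below exercises that path; it is a sketch only (the struct and function names are arbitrary, and the exact instruction selection depends on the rest of the dev.ssa backend).

package main

import "fmt"

// block is 24 bytes, the size handled by the (Zero [24] ...) rule above.
type block struct {
	a, b, c int64
}

// clear zeroes *p. Per the rule, the assignment is expected to become three
// MOVQstoreconst instructions storing 0 at offsets 16, 8, and 0.
func clear(p *block) {
	*p = block{}
}

func main() {
	b := block{1, 2, 3}
	clear(&b)
	fmt.Println(b) // {0 0 0}
}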
@@ -382,8 +382,8 @@ func init() {
 		// For storeconst ops, the AuxInt field encodes both
 		// the value to store and an address offset of the store.
-		// Cast AuxInt to a StoreConst to extract Val and Off fields.
-		{name: "MOVBstoreconst", reg: gpstoreconst, asm: "MOVB", typ: "Mem"}, // store low byte of StoreConst(AuxInt).Val() to arg0+StoreConst(AuxInt).Off()+aux. arg1=mem
+		// Cast AuxInt to a ValAndOff to extract Val and Off fields.
+		{name: "MOVBstoreconst", reg: gpstoreconst, asm: "MOVB", typ: "Mem"}, // store low byte of ValAndOff(AuxInt).Val() to arg0+ValAndOff(AuxInt).Off()+aux. arg1=mem
 		{name: "MOVWstoreconst", reg: gpstoreconst, asm: "MOVW", typ: "Mem"}, // store low 2 bytes of ...
 		{name: "MOVLstoreconst", reg: gpstoreconst, asm: "MOVL", typ: "Mem"}, // store low 4 bytes of ...
 		{name: "MOVQstoreconst", reg: gpstoreconst, asm: "MOVQ", typ: "Mem"}, // store 8 bytes of ...
...
@@ -4,6 +4,8 @@
 package ssa

+import "fmt"
+
 // An Op encodes the specific operation that a Value performs.
 // Opcodes' semantics can be modified by the type and aux fields of the Value.
 // For instance, OpAdd can be 32 or 64 bit, signed or unsigned, float or complex, depending on Value.Type.
@@ -30,57 +32,67 @@ type regInfo struct {
 	outputs []regMask // NOTE: values can only have 1 output for now.
 }

-// A StoreConst is used by the MOVXstoreconst opcodes. It holds
-// both the value to store and an offset from the store pointer.
-// A StoreConst is intended to be encoded into an AuxInt field.
-// The zero StoreConst encodes a value of 0 and an offset of 0.
-// The high 32 bits hold a value to be stored.
+// A ValAndOff is used by the several opcodes. It holds
+// both a value and a pointer offset.
+// A ValAndOff is intended to be encoded into an AuxInt field.
+// The zero ValAndOff encodes a value of 0 and an offset of 0.
+// The high 32 bits hold a value.
 // The low 32 bits hold a pointer offset.
-type StoreConst int64
+type ValAndOff int64

-func (sc StoreConst) Val() int64 {
-	return int64(sc) >> 32
-}
-func (sc StoreConst) Off() int64 {
-	return int64(int32(sc))
-}
-func (sc StoreConst) Int64() int64 {
-	return int64(sc)
-}
+func (x ValAndOff) Val() int64 {
+	return int64(x) >> 32
+}
+func (x ValAndOff) Off() int64 {
+	return int64(int32(x))
+}
+func (x ValAndOff) Int64() int64 {
+	return int64(x)
+}
+func (x ValAndOff) String() string {
+	return fmt.Sprintf("val=%d,off=%d", x.Val(), x.Off())
+}

-// validStoreConstOff reports whether the offset can be used
-// as an argument to makeStoreConst.
-func validStoreConstOff(off int64) bool {
-	return off == int64(int32(off))
-}
+// validVal reports whether the value can be used
+// as an argument to makeValAndOff.
+func validVal(val int64) bool {
+	return val == int64(int32(val))
+}
+
+// validOff reports whether the offset can be used
+// as an argument to makeValAndOff.
+func validOff(off int64) bool {
+	return off == int64(int32(off))
+}

-// validStoreConst reports whether we can fit the value and offset into
-// a StoreConst value.
-func validStoreConst(val, off int64) bool {
-	if val != int64(int32(val)) {
+// validValAndOff reports whether we can fit the value and offset into
+// a ValAndOff value.
+func validValAndOff(val, off int64) bool {
+	if !validVal(val) {
 		return false
 	}
-	if !validStoreConstOff(off) {
+	if !validOff(off) {
 		return false
 	}
 	return true
 }

-// encode encodes a StoreConst into an int64 suitable for storing in an AuxInt field.
-func makeStoreConst(val, off int64) int64 {
-	if !validStoreConst(val, off) {
-		panic("invalid makeStoreConst")
+// makeValAndOff encodes a ValAndOff into an int64 suitable for storing in an AuxInt field.
+func makeValAndOff(val, off int64) int64 {
+	if !validValAndOff(val, off) {
+		panic("invalid makeValAndOff")
 	}
-	return StoreConst(val<<32 + int64(uint32(off))).Int64()
+	return ValAndOff(val<<32 + int64(uint32(off))).Int64()
 }

-func (sc StoreConst) canAdd(off int64) bool {
-	newoff := sc.Off() + off
+func (x ValAndOff) canAdd(off int64) bool {
+	newoff := x.Off() + off
 	return newoff == int64(int32(newoff))
 }
-func (sc StoreConst) add(off int64) int64 {
-	if !sc.canAdd(off) {
-		panic("invalid StoreConst.add")
+
+func (x ValAndOff) add(off int64) int64 {
+	if !x.canAdd(off) {
+		panic("invalid ValAndOff.add")
 	}
-	return makeStoreConst(sc.Val(), sc.Off()+off)
+	return makeValAndOff(x.Val(), x.Off()+off)
 }
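The helpers above fully determine the encoding: the value sits in the high 32 bits of the AuxInt and the offset in the low 32 bits. Below is a self-contained restatement of that round trip for illustration only; it mirrors the ValAndOff methods added in cmd/compile/internal/ssa rather than importing the compiler-internal package.

package main

import "fmt"

// ValAndOff mirrors the ssa.ValAndOff type above: the high 32 bits hold a
// value, the low 32 bits hold a pointer offset.
type ValAndOff int64

func (x ValAndOff) Val() int64 { return int64(x) >> 32 }
func (x ValAndOff) Off() int64 { return int64(int32(x)) }
func (x ValAndOff) String() string {
	return fmt.Sprintf("val=%d,off=%d", x.Val(), x.Off())
}

// makeValAndOff packs val and off the same way the compiler does;
// both must fit in 32 bits (validValAndOff checks this in the real code).
func makeValAndOff(val, off int64) int64 {
	return int64(ValAndOff(val<<32 + int64(uint32(off))))
}

func main() {
	aux := makeValAndOff(7, 16)
	x := ValAndOff(aux)
	fmt.Println(x)                // val=7,off=16
	fmt.Println(x.Val(), x.Off()) // 7 16
}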
...@@ -6059,32 +6059,32 @@ end3a2e55db7e03920700c4875f6a55de3b: ...@@ -6059,32 +6059,32 @@ end3a2e55db7e03920700c4875f6a55de3b:
ende6347ac19d0469ee59d2e7f2e18d1070: ende6347ac19d0469ee59d2e7f2e18d1070:
; ;
// match: (MOVBstore [off] {sym} ptr (MOVBconst [c]) mem) // match: (MOVBstore [off] {sym} ptr (MOVBconst [c]) mem)
// cond: validStoreConstOff(off) // cond: validOff(off)
// result: (MOVBstoreconst [makeStoreConst(int64(int8(c)),off)] {sym} ptr mem) // result: (MOVBstoreconst [makeValAndOff(int64(int8(c)),off)] {sym} ptr mem)
{ {
off := v.AuxInt off := v.AuxInt
sym := v.Aux sym := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
if v.Args[1].Op != OpAMD64MOVBconst { if v.Args[1].Op != OpAMD64MOVBconst {
goto enda8ebda583a842dae6377b7f562040318 goto endfdf24c49923451a076f1868988b8c9d9
} }
c := v.Args[1].AuxInt c := v.Args[1].AuxInt
mem := v.Args[2] mem := v.Args[2]
if !(validStoreConstOff(off)) { if !(validOff(off)) {
goto enda8ebda583a842dae6377b7f562040318 goto endfdf24c49923451a076f1868988b8c9d9
} }
v.Op = OpAMD64MOVBstoreconst v.Op = OpAMD64MOVBstoreconst
v.AuxInt = 0 v.AuxInt = 0
v.Aux = nil v.Aux = nil
v.resetArgs() v.resetArgs()
v.AuxInt = makeStoreConst(int64(int8(c)), off) v.AuxInt = makeValAndOff(int64(int8(c)), off)
v.Aux = sym v.Aux = sym
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
goto enda8ebda583a842dae6377b7f562040318 goto endfdf24c49923451a076f1868988b8c9d9
enda8ebda583a842dae6377b7f562040318: endfdf24c49923451a076f1868988b8c9d9:
; ;
// match: (MOVBstore [off1] {sym1} (LEAQ [off2] {sym2} base) val mem) // match: (MOVBstore [off1] {sym1} (LEAQ [off2] {sym2} base) val mem)
// cond: canMergeSym(sym1, sym2) // cond: canMergeSym(sym1, sym2)
...@@ -6123,61 +6123,61 @@ func rewriteValueAMD64_OpAMD64MOVBstoreconst(v *Value, config *Config) bool { ...@@ -6123,61 +6123,61 @@ func rewriteValueAMD64_OpAMD64MOVBstoreconst(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (MOVBstoreconst [sc] {s} (ADDQconst [off] ptr) mem) // match: (MOVBstoreconst [sc] {s} (ADDQconst [off] ptr) mem)
// cond: StoreConst(sc).canAdd(off) // cond: ValAndOff(sc).canAdd(off)
// result: (MOVBstoreconst [StoreConst(sc).add(off)] {s} ptr mem) // result: (MOVBstoreconst [ValAndOff(sc).add(off)] {s} ptr mem)
{ {
sc := v.AuxInt sc := v.AuxInt
s := v.Aux s := v.Aux
if v.Args[0].Op != OpAMD64ADDQconst { if v.Args[0].Op != OpAMD64ADDQconst {
goto ende1cdf6d463f91ba4dd1956f8ba4cb128 goto end8d35ca650b7c40bc43984d3f5925a052
} }
off := v.Args[0].AuxInt off := v.Args[0].AuxInt
ptr := v.Args[0].Args[0] ptr := v.Args[0].Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(StoreConst(sc).canAdd(off)) { if !(ValAndOff(sc).canAdd(off)) {
goto ende1cdf6d463f91ba4dd1956f8ba4cb128 goto end8d35ca650b7c40bc43984d3f5925a052
} }
v.Op = OpAMD64MOVBstoreconst v.Op = OpAMD64MOVBstoreconst
v.AuxInt = 0 v.AuxInt = 0
v.Aux = nil v.Aux = nil
v.resetArgs() v.resetArgs()
v.AuxInt = StoreConst(sc).add(off) v.AuxInt = ValAndOff(sc).add(off)
v.Aux = s v.Aux = s
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
goto ende1cdf6d463f91ba4dd1956f8ba4cb128 goto end8d35ca650b7c40bc43984d3f5925a052
ende1cdf6d463f91ba4dd1956f8ba4cb128: end8d35ca650b7c40bc43984d3f5925a052:
; ;
// match: (MOVBstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem) // match: (MOVBstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem)
// cond: canMergeSym(sym1, sym2) && StoreConst(sc).canAdd(off) // cond: canMergeSym(sym1, sym2) && ValAndOff(sc).canAdd(off)
// result: (MOVBstoreconst [StoreConst(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem) // result: (MOVBstoreconst [ValAndOff(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem)
{ {
sc := v.AuxInt sc := v.AuxInt
sym1 := v.Aux sym1 := v.Aux
if v.Args[0].Op != OpAMD64LEAQ { if v.Args[0].Op != OpAMD64LEAQ {
goto end5feed29bca3ce7d5fccda89acf71c855 goto end8deb839acf84818dd8fc827c0338f42c
} }
off := v.Args[0].AuxInt off := v.Args[0].AuxInt
sym2 := v.Args[0].Aux sym2 := v.Args[0].Aux
ptr := v.Args[0].Args[0] ptr := v.Args[0].Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(canMergeSym(sym1, sym2) && StoreConst(sc).canAdd(off)) { if !(canMergeSym(sym1, sym2) && ValAndOff(sc).canAdd(off)) {
goto end5feed29bca3ce7d5fccda89acf71c855 goto end8deb839acf84818dd8fc827c0338f42c
} }
v.Op = OpAMD64MOVBstoreconst v.Op = OpAMD64MOVBstoreconst
v.AuxInt = 0 v.AuxInt = 0
v.Aux = nil v.Aux = nil
v.resetArgs() v.resetArgs()
v.AuxInt = StoreConst(sc).add(off) v.AuxInt = ValAndOff(sc).add(off)
v.Aux = mergeSym(sym1, sym2) v.Aux = mergeSym(sym1, sym2)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
goto end5feed29bca3ce7d5fccda89acf71c855 goto end8deb839acf84818dd8fc827c0338f42c
end5feed29bca3ce7d5fccda89acf71c855: end8deb839acf84818dd8fc827c0338f42c:
; ;
return false return false
} }
...@@ -6323,32 +6323,32 @@ end199e8c23a5e7e99728a43d6a83b2c2cf: ...@@ -6323,32 +6323,32 @@ end199e8c23a5e7e99728a43d6a83b2c2cf:
end43bffdb8d9c1fc85a95778d4911955f1: end43bffdb8d9c1fc85a95778d4911955f1:
; ;
// match: (MOVLstore [off] {sym} ptr (MOVLconst [c]) mem) // match: (MOVLstore [off] {sym} ptr (MOVLconst [c]) mem)
// cond: validStoreConstOff(off) // cond: validOff(off)
// result: (MOVLstoreconst [makeStoreConst(int64(int32(c)),off)] {sym} ptr mem) // result: (MOVLstoreconst [makeValAndOff(int64(int32(c)),off)] {sym} ptr mem)
{ {
off := v.AuxInt off := v.AuxInt
sym := v.Aux sym := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
if v.Args[1].Op != OpAMD64MOVLconst { if v.Args[1].Op != OpAMD64MOVLconst {
goto end14bc0c027d67d279cf3ef2038b759ce2 goto enda62a54c45bf42db801af4095d27faccd
} }
c := v.Args[1].AuxInt c := v.Args[1].AuxInt
mem := v.Args[2] mem := v.Args[2]
if !(validStoreConstOff(off)) { if !(validOff(off)) {
goto end14bc0c027d67d279cf3ef2038b759ce2 goto enda62a54c45bf42db801af4095d27faccd
} }
v.Op = OpAMD64MOVLstoreconst v.Op = OpAMD64MOVLstoreconst
v.AuxInt = 0 v.AuxInt = 0
v.Aux = nil v.Aux = nil
v.resetArgs() v.resetArgs()
v.AuxInt = makeStoreConst(int64(int32(c)), off) v.AuxInt = makeValAndOff(int64(int32(c)), off)
v.Aux = sym v.Aux = sym
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
goto end14bc0c027d67d279cf3ef2038b759ce2 goto enda62a54c45bf42db801af4095d27faccd
end14bc0c027d67d279cf3ef2038b759ce2: enda62a54c45bf42db801af4095d27faccd:
; ;
// match: (MOVLstore [off1] {sym1} (LEAQ [off2] {sym2} base) val mem) // match: (MOVLstore [off1] {sym1} (LEAQ [off2] {sym2} base) val mem)
// cond: canMergeSym(sym1, sym2) // cond: canMergeSym(sym1, sym2)
...@@ -6387,61 +6387,61 @@ func rewriteValueAMD64_OpAMD64MOVLstoreconst(v *Value, config *Config) bool { ...@@ -6387,61 +6387,61 @@ func rewriteValueAMD64_OpAMD64MOVLstoreconst(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (MOVLstoreconst [sc] {s} (ADDQconst [off] ptr) mem) // match: (MOVLstoreconst [sc] {s} (ADDQconst [off] ptr) mem)
// cond: StoreConst(sc).canAdd(off) // cond: ValAndOff(sc).canAdd(off)
// result: (MOVLstoreconst [StoreConst(sc).add(off)] {s} ptr mem) // result: (MOVLstoreconst [ValAndOff(sc).add(off)] {s} ptr mem)
{ {
sc := v.AuxInt sc := v.AuxInt
s := v.Aux s := v.Aux
if v.Args[0].Op != OpAMD64ADDQconst { if v.Args[0].Op != OpAMD64ADDQconst {
goto end7665f96d0aaa57009bf98632f19bf8e7 goto end4981598152dd0763f1d735810a7d34e8
} }
off := v.Args[0].AuxInt off := v.Args[0].AuxInt
ptr := v.Args[0].Args[0] ptr := v.Args[0].Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(StoreConst(sc).canAdd(off)) { if !(ValAndOff(sc).canAdd(off)) {
goto end7665f96d0aaa57009bf98632f19bf8e7 goto end4981598152dd0763f1d735810a7d34e8
} }
v.Op = OpAMD64MOVLstoreconst v.Op = OpAMD64MOVLstoreconst
v.AuxInt = 0 v.AuxInt = 0
v.Aux = nil v.Aux = nil
v.resetArgs() v.resetArgs()
v.AuxInt = StoreConst(sc).add(off) v.AuxInt = ValAndOff(sc).add(off)
v.Aux = s v.Aux = s
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
goto end7665f96d0aaa57009bf98632f19bf8e7 goto end4981598152dd0763f1d735810a7d34e8
end7665f96d0aaa57009bf98632f19bf8e7: end4981598152dd0763f1d735810a7d34e8:
; ;
// match: (MOVLstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem) // match: (MOVLstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem)
// cond: canMergeSym(sym1, sym2) && StoreConst(sc).canAdd(off) // cond: canMergeSym(sym1, sym2) && ValAndOff(sc).canAdd(off)
// result: (MOVLstoreconst [StoreConst(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem) // result: (MOVLstoreconst [ValAndOff(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem)
{ {
sc := v.AuxInt sc := v.AuxInt
sym1 := v.Aux sym1 := v.Aux
if v.Args[0].Op != OpAMD64LEAQ { if v.Args[0].Op != OpAMD64LEAQ {
goto end1664c6056a9c65fcbe30eca273e8ee64 goto endd579250954b5df84a77518b36f739e12
} }
off := v.Args[0].AuxInt off := v.Args[0].AuxInt
sym2 := v.Args[0].Aux sym2 := v.Args[0].Aux
ptr := v.Args[0].Args[0] ptr := v.Args[0].Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(canMergeSym(sym1, sym2) && StoreConst(sc).canAdd(off)) { if !(canMergeSym(sym1, sym2) && ValAndOff(sc).canAdd(off)) {
goto end1664c6056a9c65fcbe30eca273e8ee64 goto endd579250954b5df84a77518b36f739e12
} }
v.Op = OpAMD64MOVLstoreconst v.Op = OpAMD64MOVLstoreconst
v.AuxInt = 0 v.AuxInt = 0
v.Aux = nil v.Aux = nil
v.resetArgs() v.resetArgs()
v.AuxInt = StoreConst(sc).add(off) v.AuxInt = ValAndOff(sc).add(off)
v.Aux = mergeSym(sym1, sym2) v.Aux = mergeSym(sym1, sym2)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
goto end1664c6056a9c65fcbe30eca273e8ee64 goto endd579250954b5df84a77518b36f739e12
end1664c6056a9c65fcbe30eca273e8ee64: endd579250954b5df84a77518b36f739e12:
; ;
return false return false
} }
...@@ -6720,32 +6720,32 @@ func rewriteValueAMD64_OpAMD64MOVQstore(v *Value, config *Config) bool { ...@@ -6720,32 +6720,32 @@ func rewriteValueAMD64_OpAMD64MOVQstore(v *Value, config *Config) bool {
end0a110b5e42a4576c32fda50590092848: end0a110b5e42a4576c32fda50590092848:
; ;
// match: (MOVQstore [off] {sym} ptr (MOVQconst [c]) mem) // match: (MOVQstore [off] {sym} ptr (MOVQconst [c]) mem)
// cond: validStoreConst(c,off) // cond: validValAndOff(c,off)
// result: (MOVQstoreconst [makeStoreConst(c,off)] {sym} ptr mem) // result: (MOVQstoreconst [makeValAndOff(c,off)] {sym} ptr mem)
{ {
off := v.AuxInt off := v.AuxInt
sym := v.Aux sym := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
if v.Args[1].Op != OpAMD64MOVQconst { if v.Args[1].Op != OpAMD64MOVQconst {
goto end8368f37d24b6a2f59c3d00966c4d4111 goto endda0f4b36e19753762dbd1c6ee05e4c81
} }
c := v.Args[1].AuxInt c := v.Args[1].AuxInt
mem := v.Args[2] mem := v.Args[2]
if !(validStoreConst(c, off)) { if !(validValAndOff(c, off)) {
goto end8368f37d24b6a2f59c3d00966c4d4111 goto endda0f4b36e19753762dbd1c6ee05e4c81
} }
v.Op = OpAMD64MOVQstoreconst v.Op = OpAMD64MOVQstoreconst
v.AuxInt = 0 v.AuxInt = 0
v.Aux = nil v.Aux = nil
v.resetArgs() v.resetArgs()
v.AuxInt = makeStoreConst(c, off) v.AuxInt = makeValAndOff(c, off)
v.Aux = sym v.Aux = sym
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
goto end8368f37d24b6a2f59c3d00966c4d4111 goto endda0f4b36e19753762dbd1c6ee05e4c81
end8368f37d24b6a2f59c3d00966c4d4111: endda0f4b36e19753762dbd1c6ee05e4c81:
; ;
// match: (MOVQstore [off1] {sym1} (LEAQ [off2] {sym2} base) val mem) // match: (MOVQstore [off1] {sym1} (LEAQ [off2] {sym2} base) val mem)
// cond: canMergeSym(sym1, sym2) // cond: canMergeSym(sym1, sym2)
...@@ -6817,61 +6817,61 @@ func rewriteValueAMD64_OpAMD64MOVQstoreconst(v *Value, config *Config) bool { ...@@ -6817,61 +6817,61 @@ func rewriteValueAMD64_OpAMD64MOVQstoreconst(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (MOVQstoreconst [sc] {s} (ADDQconst [off] ptr) mem) // match: (MOVQstoreconst [sc] {s} (ADDQconst [off] ptr) mem)
// cond: StoreConst(sc).canAdd(off) // cond: ValAndOff(sc).canAdd(off)
// result: (MOVQstoreconst [StoreConst(sc).add(off)] {s} ptr mem) // result: (MOVQstoreconst [ValAndOff(sc).add(off)] {s} ptr mem)
{ {
sc := v.AuxInt sc := v.AuxInt
s := v.Aux s := v.Aux
if v.Args[0].Op != OpAMD64ADDQconst { if v.Args[0].Op != OpAMD64ADDQconst {
goto end5826e30265c68ea8c4cd595ceedf9405 goto end3694207cd20e8e1cc719e179bdfe0c74
} }
off := v.Args[0].AuxInt off := v.Args[0].AuxInt
ptr := v.Args[0].Args[0] ptr := v.Args[0].Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(StoreConst(sc).canAdd(off)) { if !(ValAndOff(sc).canAdd(off)) {
goto end5826e30265c68ea8c4cd595ceedf9405 goto end3694207cd20e8e1cc719e179bdfe0c74
} }
v.Op = OpAMD64MOVQstoreconst v.Op = OpAMD64MOVQstoreconst
v.AuxInt = 0 v.AuxInt = 0
v.Aux = nil v.Aux = nil
v.resetArgs() v.resetArgs()
v.AuxInt = StoreConst(sc).add(off) v.AuxInt = ValAndOff(sc).add(off)
v.Aux = s v.Aux = s
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
goto end5826e30265c68ea8c4cd595ceedf9405 goto end3694207cd20e8e1cc719e179bdfe0c74
end5826e30265c68ea8c4cd595ceedf9405: end3694207cd20e8e1cc719e179bdfe0c74:
; ;
// match: (MOVQstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem) // match: (MOVQstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem)
// cond: canMergeSym(sym1, sym2) && StoreConst(sc).canAdd(off) // cond: canMergeSym(sym1, sym2) && ValAndOff(sc).canAdd(off)
// result: (MOVQstoreconst [StoreConst(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem) // result: (MOVQstoreconst [ValAndOff(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem)
{ {
sc := v.AuxInt sc := v.AuxInt
sym1 := v.Aux sym1 := v.Aux
if v.Args[0].Op != OpAMD64LEAQ { if v.Args[0].Op != OpAMD64LEAQ {
goto endb9c7f7a9dbc6b885d84f851c74b018e5 goto endf405b27b22dbf76f83abd1b5ad5e53d9
} }
off := v.Args[0].AuxInt off := v.Args[0].AuxInt
sym2 := v.Args[0].Aux sym2 := v.Args[0].Aux
ptr := v.Args[0].Args[0] ptr := v.Args[0].Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(canMergeSym(sym1, sym2) && StoreConst(sc).canAdd(off)) { if !(canMergeSym(sym1, sym2) && ValAndOff(sc).canAdd(off)) {
goto endb9c7f7a9dbc6b885d84f851c74b018e5 goto endf405b27b22dbf76f83abd1b5ad5e53d9
} }
v.Op = OpAMD64MOVQstoreconst v.Op = OpAMD64MOVQstoreconst
v.AuxInt = 0 v.AuxInt = 0
v.Aux = nil v.Aux = nil
v.resetArgs() v.resetArgs()
v.AuxInt = StoreConst(sc).add(off) v.AuxInt = ValAndOff(sc).add(off)
v.Aux = mergeSym(sym1, sym2) v.Aux = mergeSym(sym1, sym2)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
goto endb9c7f7a9dbc6b885d84f851c74b018e5 goto endf405b27b22dbf76f83abd1b5ad5e53d9
endb9c7f7a9dbc6b885d84f851c74b018e5: endf405b27b22dbf76f83abd1b5ad5e53d9:
; ;
return false return false
} }
...@@ -7567,32 +7567,32 @@ end187fe73dfaf9cf5f4c349283b4dfd9d1: ...@@ -7567,32 +7567,32 @@ end187fe73dfaf9cf5f4c349283b4dfd9d1:
endda15fdd59aa956ded0440188f38de1aa: endda15fdd59aa956ded0440188f38de1aa:
; ;
// match: (MOVWstore [off] {sym} ptr (MOVWconst [c]) mem) // match: (MOVWstore [off] {sym} ptr (MOVWconst [c]) mem)
// cond: validStoreConstOff(off) // cond: validOff(off)
// result: (MOVWstoreconst [makeStoreConst(int64(int16(c)),off)] {sym} ptr mem) // result: (MOVWstoreconst [makeValAndOff(int64(int16(c)),off)] {sym} ptr mem)
{ {
off := v.AuxInt off := v.AuxInt
sym := v.Aux sym := v.Aux
ptr := v.Args[0] ptr := v.Args[0]
if v.Args[1].Op != OpAMD64MOVWconst { if v.Args[1].Op != OpAMD64MOVWconst {
goto end226f449215b8ea54ac24fb8d52356ffa goto end60327daf9965d73a8c1971d098e1e31d
} }
c := v.Args[1].AuxInt c := v.Args[1].AuxInt
mem := v.Args[2] mem := v.Args[2]
if !(validStoreConstOff(off)) { if !(validOff(off)) {
goto end226f449215b8ea54ac24fb8d52356ffa goto end60327daf9965d73a8c1971d098e1e31d
} }
v.Op = OpAMD64MOVWstoreconst v.Op = OpAMD64MOVWstoreconst
v.AuxInt = 0 v.AuxInt = 0
v.Aux = nil v.Aux = nil
v.resetArgs() v.resetArgs()
v.AuxInt = makeStoreConst(int64(int16(c)), off) v.AuxInt = makeValAndOff(int64(int16(c)), off)
v.Aux = sym v.Aux = sym
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
goto end226f449215b8ea54ac24fb8d52356ffa goto end60327daf9965d73a8c1971d098e1e31d
end226f449215b8ea54ac24fb8d52356ffa: end60327daf9965d73a8c1971d098e1e31d:
; ;
// match: (MOVWstore [off1] {sym1} (LEAQ [off2] {sym2} base) val mem) // match: (MOVWstore [off1] {sym1} (LEAQ [off2] {sym2} base) val mem)
// cond: canMergeSym(sym1, sym2) // cond: canMergeSym(sym1, sym2)
...@@ -7631,61 +7631,61 @@ func rewriteValueAMD64_OpAMD64MOVWstoreconst(v *Value, config *Config) bool { ...@@ -7631,61 +7631,61 @@ func rewriteValueAMD64_OpAMD64MOVWstoreconst(v *Value, config *Config) bool {
b := v.Block b := v.Block
_ = b _ = b
// match: (MOVWstoreconst [sc] {s} (ADDQconst [off] ptr) mem) // match: (MOVWstoreconst [sc] {s} (ADDQconst [off] ptr) mem)
// cond: StoreConst(sc).canAdd(off) // cond: ValAndOff(sc).canAdd(off)
// result: (MOVWstoreconst [StoreConst(sc).add(off)] {s} ptr mem) // result: (MOVWstoreconst [ValAndOff(sc).add(off)] {s} ptr mem)
{ {
sc := v.AuxInt sc := v.AuxInt
s := v.Aux s := v.Aux
if v.Args[0].Op != OpAMD64ADDQconst { if v.Args[0].Op != OpAMD64ADDQconst {
goto end2b764f9cf1bb32af25ba4e70a6705b91 goto end8825edac065f0e1c615ca5e6ba40e2de
} }
off := v.Args[0].AuxInt off := v.Args[0].AuxInt
ptr := v.Args[0].Args[0] ptr := v.Args[0].Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(StoreConst(sc).canAdd(off)) { if !(ValAndOff(sc).canAdd(off)) {
goto end2b764f9cf1bb32af25ba4e70a6705b91 goto end8825edac065f0e1c615ca5e6ba40e2de
} }
v.Op = OpAMD64MOVWstoreconst v.Op = OpAMD64MOVWstoreconst
v.AuxInt = 0 v.AuxInt = 0
v.Aux = nil v.Aux = nil
v.resetArgs() v.resetArgs()
v.AuxInt = StoreConst(sc).add(off) v.AuxInt = ValAndOff(sc).add(off)
v.Aux = s v.Aux = s
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
goto end2b764f9cf1bb32af25ba4e70a6705b91 goto end8825edac065f0e1c615ca5e6ba40e2de
end2b764f9cf1bb32af25ba4e70a6705b91: end8825edac065f0e1c615ca5e6ba40e2de:
; ;
// match: (MOVWstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem) // match: (MOVWstoreconst [sc] {sym1} (LEAQ [off] {sym2} ptr) mem)
// cond: canMergeSym(sym1, sym2) && StoreConst(sc).canAdd(off) // cond: canMergeSym(sym1, sym2) && ValAndOff(sc).canAdd(off)
// result: (MOVWstoreconst [StoreConst(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem) // result: (MOVWstoreconst [ValAndOff(sc).add(off)] {mergeSym(sym1, sym2)} ptr mem)
{ {
sc := v.AuxInt sc := v.AuxInt
sym1 := v.Aux sym1 := v.Aux
if v.Args[0].Op != OpAMD64LEAQ { if v.Args[0].Op != OpAMD64LEAQ {
goto enda15bfd8d540015b2245c65be486d2ffd goto endba47397e07b40a64fa4cad36ac2e32ad
} }
off := v.Args[0].AuxInt off := v.Args[0].AuxInt
sym2 := v.Args[0].Aux sym2 := v.Args[0].Aux
ptr := v.Args[0].Args[0] ptr := v.Args[0].Args[0]
mem := v.Args[1] mem := v.Args[1]
if !(canMergeSym(sym1, sym2) && StoreConst(sc).canAdd(off)) { if !(canMergeSym(sym1, sym2) && ValAndOff(sc).canAdd(off)) {
goto enda15bfd8d540015b2245c65be486d2ffd goto endba47397e07b40a64fa4cad36ac2e32ad
} }
v.Op = OpAMD64MOVWstoreconst v.Op = OpAMD64MOVWstoreconst
v.AuxInt = 0 v.AuxInt = 0
v.Aux = nil v.Aux = nil
v.resetArgs() v.resetArgs()
v.AuxInt = StoreConst(sc).add(off) v.AuxInt = ValAndOff(sc).add(off)
v.Aux = mergeSym(sym1, sym2) v.Aux = mergeSym(sym1, sym2)
v.AddArg(ptr) v.AddArg(ptr)
v.AddArg(mem) v.AddArg(mem)
return true return true
} }
goto enda15bfd8d540015b2245c65be486d2ffd goto endba47397e07b40a64fa4cad36ac2e32ad
enda15bfd8d540015b2245c65be486d2ffd: endba47397e07b40a64fa4cad36ac2e32ad:
; ;
return false return false
} }
...@@ -14596,10 +14596,10 @@ end07aaaebfa15a48c52cd79b68e28d266f: ...@@ -14596,10 +14596,10 @@ end07aaaebfa15a48c52cd79b68e28d266f:
; ;
// match: (Zero [3] destptr mem) // match: (Zero [3] destptr mem)
// cond: // cond:
// result: (MOVBstoreconst [makeStoreConst(0,2)] destptr (MOVWstoreconst [0] destptr mem)) // result: (MOVBstoreconst [makeValAndOff(0,2)] destptr (MOVWstoreconst [0] destptr mem))
{ {
if v.AuxInt != 3 { if v.AuxInt != 3 {
goto end03b2ae08f901891919e454f05273fb4e goto end3bf4a24a87e0727b9bcfbb5fcd24aabe
} }
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
...@@ -14607,7 +14607,7 @@ end07aaaebfa15a48c52cd79b68e28d266f: ...@@ -14607,7 +14607,7 @@ end07aaaebfa15a48c52cd79b68e28d266f:
v.AuxInt = 0 v.AuxInt = 0
v.Aux = nil v.Aux = nil
v.resetArgs() v.resetArgs()
v.AuxInt = makeStoreConst(0, 2) v.AuxInt = makeValAndOff(0, 2)
v.AddArg(destptr) v.AddArg(destptr)
v0 := b.NewValue0(v.Line, OpAMD64MOVWstoreconst, TypeInvalid) v0 := b.NewValue0(v.Line, OpAMD64MOVWstoreconst, TypeInvalid)
v0.AuxInt = 0 v0.AuxInt = 0
...@@ -14617,15 +14617,15 @@ end07aaaebfa15a48c52cd79b68e28d266f: ...@@ -14617,15 +14617,15 @@ end07aaaebfa15a48c52cd79b68e28d266f:
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
goto end03b2ae08f901891919e454f05273fb4e goto end3bf4a24a87e0727b9bcfbb5fcd24aabe
end03b2ae08f901891919e454f05273fb4e: end3bf4a24a87e0727b9bcfbb5fcd24aabe:
; ;
// match: (Zero [5] destptr mem) // match: (Zero [5] destptr mem)
// cond: // cond:
// result: (MOVBstoreconst [makeStoreConst(0,4)] destptr (MOVLstoreconst [0] destptr mem)) // result: (MOVBstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [0] destptr mem))
{ {
if v.AuxInt != 5 { if v.AuxInt != 5 {
goto endc473059deb6291d483262b08312eab48 goto end567e4a90c6867faf1dfc2cd57daf2ce4
} }
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
...@@ -14633,7 +14633,7 @@ end03b2ae08f901891919e454f05273fb4e: ...@@ -14633,7 +14633,7 @@ end03b2ae08f901891919e454f05273fb4e:
v.AuxInt = 0 v.AuxInt = 0
v.Aux = nil v.Aux = nil
v.resetArgs() v.resetArgs()
v.AuxInt = makeStoreConst(0, 4) v.AuxInt = makeValAndOff(0, 4)
v.AddArg(destptr) v.AddArg(destptr)
v0 := b.NewValue0(v.Line, OpAMD64MOVLstoreconst, TypeInvalid) v0 := b.NewValue0(v.Line, OpAMD64MOVLstoreconst, TypeInvalid)
v0.AuxInt = 0 v0.AuxInt = 0
...@@ -14643,15 +14643,15 @@ end03b2ae08f901891919e454f05273fb4e: ...@@ -14643,15 +14643,15 @@ end03b2ae08f901891919e454f05273fb4e:
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
goto endc473059deb6291d483262b08312eab48 goto end567e4a90c6867faf1dfc2cd57daf2ce4
endc473059deb6291d483262b08312eab48: end567e4a90c6867faf1dfc2cd57daf2ce4:
; ;
// match: (Zero [6] destptr mem) // match: (Zero [6] destptr mem)
// cond: // cond:
// result: (MOVWstoreconst [makeStoreConst(0,4)] destptr (MOVLstoreconst [0] destptr mem)) // result: (MOVWstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [0] destptr mem))
{ {
if v.AuxInt != 6 { if v.AuxInt != 6 {
goto end41b38839f25e3749384d53b5945bd56b goto end7cddcaf215fcc2cbca9aa958147b2380
} }
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
...@@ -14659,7 +14659,7 @@ endc473059deb6291d483262b08312eab48: ...@@ -14659,7 +14659,7 @@ endc473059deb6291d483262b08312eab48:
v.AuxInt = 0 v.AuxInt = 0
v.Aux = nil v.Aux = nil
v.resetArgs() v.resetArgs()
v.AuxInt = makeStoreConst(0, 4) v.AuxInt = makeValAndOff(0, 4)
v.AddArg(destptr) v.AddArg(destptr)
v0 := b.NewValue0(v.Line, OpAMD64MOVLstoreconst, TypeInvalid) v0 := b.NewValue0(v.Line, OpAMD64MOVLstoreconst, TypeInvalid)
v0.AuxInt = 0 v0.AuxInt = 0
...@@ -14669,15 +14669,15 @@ endc473059deb6291d483262b08312eab48: ...@@ -14669,15 +14669,15 @@ endc473059deb6291d483262b08312eab48:
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
goto end41b38839f25e3749384d53b5945bd56b goto end7cddcaf215fcc2cbca9aa958147b2380
end41b38839f25e3749384d53b5945bd56b: end7cddcaf215fcc2cbca9aa958147b2380:
; ;
// match: (Zero [7] destptr mem) // match: (Zero [7] destptr mem)
// cond: // cond:
// result: (MOVLstoreconst [makeStoreConst(0,3)] destptr (MOVLstoreconst [0] destptr mem)) // result: (MOVLstoreconst [makeValAndOff(0,3)] destptr (MOVLstoreconst [0] destptr mem))
{ {
if v.AuxInt != 7 { if v.AuxInt != 7 {
goto end06e677d4c1ac43e08783eb8117a589b6 goto end1b58cabccbc912ea4e1cf99be8a9fbf7
} }
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
...@@ -14685,7 +14685,7 @@ end41b38839f25e3749384d53b5945bd56b: ...@@ -14685,7 +14685,7 @@ end41b38839f25e3749384d53b5945bd56b:
v.AuxInt = 0 v.AuxInt = 0
v.Aux = nil v.Aux = nil
v.resetArgs() v.resetArgs()
v.AuxInt = makeStoreConst(0, 3) v.AuxInt = makeValAndOff(0, 3)
v.AddArg(destptr) v.AddArg(destptr)
v0 := b.NewValue0(v.Line, OpAMD64MOVLstoreconst, TypeInvalid) v0 := b.NewValue0(v.Line, OpAMD64MOVLstoreconst, TypeInvalid)
v0.AuxInt = 0 v0.AuxInt = 0
...@@ -14695,8 +14695,8 @@ end41b38839f25e3749384d53b5945bd56b: ...@@ -14695,8 +14695,8 @@ end41b38839f25e3749384d53b5945bd56b:
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
goto end06e677d4c1ac43e08783eb8117a589b6 goto end1b58cabccbc912ea4e1cf99be8a9fbf7
end06e677d4c1ac43e08783eb8117a589b6: end1b58cabccbc912ea4e1cf99be8a9fbf7:
; ;
// match: (Zero [size] destptr mem) // match: (Zero [size] destptr mem)
// cond: size%8 != 0 && size > 8 // cond: size%8 != 0 && size > 8
...@@ -14731,10 +14731,10 @@ endc8760f86b83b1372fce0042ab5200fc1: ...@@ -14731,10 +14731,10 @@ endc8760f86b83b1372fce0042ab5200fc1:
; ;
// match: (Zero [16] destptr mem) // match: (Zero [16] destptr mem)
// cond: // cond:
// result: (MOVQstoreconst [makeStoreConst(0,8)] destptr (MOVQstoreconst [0] destptr mem)) // result: (MOVQstoreconst [makeValAndOff(0,8)] destptr (MOVQstoreconst [0] destptr mem))
{ {
if v.AuxInt != 16 { if v.AuxInt != 16 {
goto endce0bdb028011236be9f04fb53462204d goto endf1447d60cbf8025adaf1a02a2cd219c4
} }
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
...@@ -14742,7 +14742,7 @@ endc8760f86b83b1372fce0042ab5200fc1: ...@@ -14742,7 +14742,7 @@ endc8760f86b83b1372fce0042ab5200fc1:
v.AuxInt = 0 v.AuxInt = 0
v.Aux = nil v.Aux = nil
v.resetArgs() v.resetArgs()
v.AuxInt = makeStoreConst(0, 8) v.AuxInt = makeValAndOff(0, 8)
v.AddArg(destptr) v.AddArg(destptr)
v0 := b.NewValue0(v.Line, OpAMD64MOVQstoreconst, TypeInvalid) v0 := b.NewValue0(v.Line, OpAMD64MOVQstoreconst, TypeInvalid)
v0.AuxInt = 0 v0.AuxInt = 0
...@@ -14752,15 +14752,15 @@ endc8760f86b83b1372fce0042ab5200fc1: ...@@ -14752,15 +14752,15 @@ endc8760f86b83b1372fce0042ab5200fc1:
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
goto endce0bdb028011236be9f04fb53462204d goto endf1447d60cbf8025adaf1a02a2cd219c4
endce0bdb028011236be9f04fb53462204d: endf1447d60cbf8025adaf1a02a2cd219c4:
; ;
// match: (Zero [24] destptr mem) // match: (Zero [24] destptr mem)
// cond: // cond:
// result: (MOVQstoreconst [makeStoreConst(0,16)] destptr (MOVQstoreconst [makeStoreConst(0,8)] destptr (MOVQstoreconst [0] destptr mem))) // result: (MOVQstoreconst [makeValAndOff(0,16)] destptr (MOVQstoreconst [makeValAndOff(0,8)] destptr (MOVQstoreconst [0] destptr mem)))
{ {
if v.AuxInt != 24 { if v.AuxInt != 24 {
goto end859fe3911b36516ea096299b2a85350e goto end57f2984a61c64f71a528e7fa75576095
} }
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
...@@ -14768,10 +14768,10 @@ endce0bdb028011236be9f04fb53462204d: ...@@ -14768,10 +14768,10 @@ endce0bdb028011236be9f04fb53462204d:
v.AuxInt = 0 v.AuxInt = 0
v.Aux = nil v.Aux = nil
v.resetArgs() v.resetArgs()
v.AuxInt = makeStoreConst(0, 16) v.AuxInt = makeValAndOff(0, 16)
v.AddArg(destptr) v.AddArg(destptr)
v0 := b.NewValue0(v.Line, OpAMD64MOVQstoreconst, TypeInvalid) v0 := b.NewValue0(v.Line, OpAMD64MOVQstoreconst, TypeInvalid)
v0.AuxInt = makeStoreConst(0, 8) v0.AuxInt = makeValAndOff(0, 8)
v0.AddArg(destptr) v0.AddArg(destptr)
v1 := b.NewValue0(v.Line, OpAMD64MOVQstoreconst, TypeInvalid) v1 := b.NewValue0(v.Line, OpAMD64MOVQstoreconst, TypeInvalid)
v1.AuxInt = 0 v1.AuxInt = 0
...@@ -14783,15 +14783,15 @@ endce0bdb028011236be9f04fb53462204d: ...@@ -14783,15 +14783,15 @@ endce0bdb028011236be9f04fb53462204d:
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
goto end859fe3911b36516ea096299b2a85350e goto end57f2984a61c64f71a528e7fa75576095
end859fe3911b36516ea096299b2a85350e: end57f2984a61c64f71a528e7fa75576095:
; ;
// match: (Zero [32] destptr mem) // match: (Zero [32] destptr mem)
// cond: // cond:
// result: (MOVQstoreconst [makeStoreConst(0,24)] destptr (MOVQstoreconst [makeStoreConst(0,16)] destptr (MOVQstoreconst [makeStoreConst(0,8)] destptr (MOVQstoreconst [0] destptr mem)))) // result: (MOVQstoreconst [makeValAndOff(0,24)] destptr (MOVQstoreconst [makeValAndOff(0,16)] destptr (MOVQstoreconst [makeValAndOff(0,8)] destptr (MOVQstoreconst [0] destptr mem))))
{ {
if v.AuxInt != 32 { if v.AuxInt != 32 {
goto end2c246614f6a9a07f1a683691b3f5780f goto end418a59f9f84dd389d37ae5c24aba2760
} }
destptr := v.Args[0] destptr := v.Args[0]
mem := v.Args[1] mem := v.Args[1]
...@@ -14799,13 +14799,13 @@ end859fe3911b36516ea096299b2a85350e: ...@@ -14799,13 +14799,13 @@ end859fe3911b36516ea096299b2a85350e:
v.AuxInt = 0 v.AuxInt = 0
v.Aux = nil v.Aux = nil
v.resetArgs() v.resetArgs()
v.AuxInt = makeStoreConst(0, 24) v.AuxInt = makeValAndOff(0, 24)
v.AddArg(destptr) v.AddArg(destptr)
v0 := b.NewValue0(v.Line, OpAMD64MOVQstoreconst, TypeInvalid) v0 := b.NewValue0(v.Line, OpAMD64MOVQstoreconst, TypeInvalid)
v0.AuxInt = makeStoreConst(0, 16) v0.AuxInt = makeValAndOff(0, 16)
v0.AddArg(destptr) v0.AddArg(destptr)
v1 := b.NewValue0(v.Line, OpAMD64MOVQstoreconst, TypeInvalid) v1 := b.NewValue0(v.Line, OpAMD64MOVQstoreconst, TypeInvalid)
v1.AuxInt = makeStoreConst(0, 8) v1.AuxInt = makeValAndOff(0, 8)
v1.AddArg(destptr) v1.AddArg(destptr)
v2 := b.NewValue0(v.Line, OpAMD64MOVQstoreconst, TypeInvalid) v2 := b.NewValue0(v.Line, OpAMD64MOVQstoreconst, TypeInvalid)
v2.AuxInt = 0 v2.AuxInt = 0
...@@ -14819,8 +14819,8 @@ end859fe3911b36516ea096299b2a85350e: ...@@ -14819,8 +14819,8 @@ end859fe3911b36516ea096299b2a85350e:
v.AddArg(v0) v.AddArg(v0)
return true return true
} }
goto end2c246614f6a9a07f1a683691b3f5780f goto end418a59f9f84dd389d37ae5c24aba2760
end2c246614f6a9a07f1a683691b3f5780f: end418a59f9f84dd389d37ae5c24aba2760:
; ;
// match: (Zero [size] destptr mem) // match: (Zero [size] destptr mem)
// cond: size <= 1024 && size%8 == 0 && size%16 != 0 // cond: size <= 1024 && size%8 == 0 && size%16 != 0
......
@@ -61,16 +61,22 @@ func (v *Value) String() string {
 func (v *Value) LongString() string {
 	s := fmt.Sprintf("v%d = %s", v.ID, v.Op.String())
 	s += " <" + v.Type.String() + ">"
-	if v.AuxInt != 0 {
-		s += fmt.Sprintf(" [%d]", v.AuxInt)
-	}
-	switch {
-	case v.Op == OpConst32F || v.Op == OpConst64F:
-		s += fmt.Sprintf("(%g)", math.Float64frombits(uint64(v.AuxInt)))
-	case v.Op == OpConstBool && v.AuxInt == 0:
-		s += " (false)"
-	case v.Op == OpConstBool && v.AuxInt == 1:
-		s += " (true)"
-	}
+	// TODO: use some operator property flags to decide
+	// what is encoded in the AuxInt field.
+	switch v.Op {
+	case OpConst32F, OpConst64F:
+		s += fmt.Sprintf(" [%g]", math.Float64frombits(uint64(v.AuxInt)))
+	case OpConstBool:
+		if v.AuxInt == 0 {
+			s += " [false]"
+		} else {
+			s += " [true]"
+		}
+	case OpAMD64MOVBstoreconst, OpAMD64MOVWstoreconst, OpAMD64MOVLstoreconst, OpAMD64MOVQstoreconst:
+		s += fmt.Sprintf(" [%s]", ValAndOff(v.AuxInt))
+	default:
+		if v.AuxInt != 0 {
+			s += fmt.Sprintf(" [%d]", v.AuxInt)
+		}
+	}
 	if v.Aux != nil {
@@ -132,6 +138,11 @@ func (v *Value) copyInto(b *Block) *Value {
 	c.Aux = v.Aux
 	c.AuxInt = v.AuxInt
 	c.AddArgs(v.Args...)
+	for _, a := range v.Args {
+		if a.Type.IsMemory() {
+			v.Fatalf("can't move a value with a memory arg %s", v.LongString())
+		}
+	}
 	return c
 }
...