Commit d45f24c0 authored by Lynn Boger

cmd/compile: use bounded shift information on ppc64x

Makes use of bounded shift information to generate
more efficient shift instructions.

Updates #25167 for ppc64x

Change-Id: I7fc8d49a3fb3e0f273cc51bc767470b239cbdca7
Reviewed-on: https://go-review.googlesource.com/135380
Run-TryBot: Lynn Boger <laboger@linux.vnet.ibm.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Michael Munday <mike.munday@ibm.com>
parent c381ba86
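Background on the condition used throughout this change (explanatory, not text from the CL): shiftIsBounded(v) reports that the compiler has already proven the shift count is less than the operand width, so the Go-defined result for oversized counts (zero, or all sign bits for signed right shifts) cannot arise and the guard logic can be dropped. A minimal Go sketch of source these rules can apply to, assuming the usual count-masking idiom is what marks the shift as bounded:

package shiftdemo

// shiftMasked is a hypothetical example: the &63 mask bounds the
// count below 64, so on ppc64x the lowering can emit a single SLD
// instead of the ORN/MaskIfNotCarry sequence shown in the diff below.
func shiftMasked(x uint64, s uint) uint64 {
	return x << (s & 63)
}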
@@ -168,6 +168,20 @@
(Rsh8x32 x (MOVDconst [c])) && uint32(c) < 8 -> (SRAWconst (SignExt8to32 x) [c])
(Rsh8Ux32 x (MOVDconst [c])) && uint32(c) < 8 -> (SRWconst (ZeroExt8to32 x) [c])
// Lower bounded shifts first. No need to check shift value.
(Lsh64x(64|32|16|8) x y) && shiftIsBounded(v) -> (SLD x y)
(Lsh32x(64|32|16|8) x y) && shiftIsBounded(v) -> (SLW x y)
(Lsh16x(64|32|16|8) x y) && shiftIsBounded(v) -> (SLW x y)
(Lsh8x(64|32|16|8) x y) && shiftIsBounded(v) -> (SLW x y)
(Rsh64Ux(64|32|16|8) x y) && shiftIsBounded(v) -> (SRD x y)
(Rsh32Ux(64|32|16|8) x y) && shiftIsBounded(v) -> (SRW x y)
(Rsh16Ux(64|32|16|8) x y) && shiftIsBounded(v) -> (SRW (MOVHZreg x) y)
(Rsh8Ux(64|32|16|8) x y) && shiftIsBounded(v) -> (SRW (MOVBZreg x) y)
(Rsh64x(64|32|16|8) x y) && shiftIsBounded(v) -> (SRAD x y)
(Rsh32x(64|32|16|8) x y) && shiftIsBounded(v) -> (SRAW x y)
(Rsh16x(64|32|16|8) x y) && shiftIsBounded(v) -> (SRAW (MOVHreg x) y)
(Rsh8x(64|32|16|8) x y) && shiftIsBounded(v) -> (SRAW (MOVBreg x) y)
// non-constant rotates
// These are subexpressions found in statements that can become rotates
// In these cases the shift count is known to be < 64 so the more complicated expressions
...
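Why the generic rules stay so much longer (context, not part of the diff): the Go spec defines the result of a shift whose count is at least the operand width, while a hardware shift instruction typically decodes only the low bits of the count. The ORN/MaskIfNotCarry sequences in the fallback rules force the spec-defined result; the bounded rules above may skip that work. A small sketch of the guarantee being preserved:

package shiftdemo

// oversized must return 0 whenever s >= 64: Go defines oversized
// left shifts of unsigned values as 0, so without a proven bound the
// ppc64x lowering has to mask the result rather than trust the hardware.
func oversized(x uint64, s uint) uint64 {
	return x << s
}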
@@ -544,7 +544,7 @@ func rewriteValuePPC64(v *Value) bool {
case OpRsh32Ux32:
return rewriteValuePPC64_OpRsh32Ux32_0(v)
case OpRsh32Ux64:
return rewriteValuePPC64_OpRsh32Ux64_0(v) || rewriteValuePPC64_OpRsh32Ux64_10(v)
case OpRsh32Ux8:
return rewriteValuePPC64_OpRsh32Ux8_0(v)
case OpRsh32x16:
@@ -552,7 +552,7 @@ func rewriteValuePPC64(v *Value) bool {
case OpRsh32x32:
return rewriteValuePPC64_OpRsh32x32_0(v)
case OpRsh32x64:
return rewriteValuePPC64_OpRsh32x64_0(v) || rewriteValuePPC64_OpRsh32x64_10(v)
case OpRsh32x8:
return rewriteValuePPC64_OpRsh32x8_0(v)
case OpRsh64Ux16:
@@ -560,7 +560,7 @@ func rewriteValuePPC64(v *Value) bool {
case OpRsh64Ux32:
return rewriteValuePPC64_OpRsh64Ux32_0(v)
case OpRsh64Ux64:
return rewriteValuePPC64_OpRsh64Ux64_0(v) || rewriteValuePPC64_OpRsh64Ux64_10(v)
case OpRsh64Ux8:
return rewriteValuePPC64_OpRsh64Ux8_0(v)
case OpRsh64x16:
@@ -568,7 +568,7 @@ func rewriteValuePPC64(v *Value) bool {
case OpRsh64x32:
return rewriteValuePPC64_OpRsh64x32_0(v)
case OpRsh64x64:
return rewriteValuePPC64_OpRsh64x64_0(v) || rewriteValuePPC64_OpRsh64x64_10(v)
case OpRsh64x8:
return rewriteValuePPC64_OpRsh64x8_0(v)
case OpRsh8Ux16:
@@ -3070,6 +3070,21 @@ func rewriteValuePPC64_OpLsh16x16_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Lsh16x16 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SLW)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Lsh16x16 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt16to64 y)))))
for {
@@ -3136,6 +3151,21 @@ func rewriteValuePPC64_OpLsh16x32_0(v *Value) bool {
return true
}
// match: (Lsh16x32 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SLW)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Lsh16x32 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt32to64 y)))))
for {
@@ -3219,6 +3249,21 @@ func rewriteValuePPC64_OpLsh16x64_0(v *Value) bool {
return true
}
// match: (Lsh16x64 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SLW)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Lsh16x64 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] y))))
for {
@@ -3245,6 +3290,21 @@ func rewriteValuePPC64_OpLsh16x8_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Lsh16x8 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SLW)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Lsh16x8 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt8to64 y)))))
for {
@@ -3273,6 +3333,21 @@ func rewriteValuePPC64_OpLsh32x16_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Lsh32x16 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SLW)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Lsh32x16 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt16to64 y)))))
for {
@@ -3339,6 +3414,21 @@ func rewriteValuePPC64_OpLsh32x32_0(v *Value) bool {
return true
}
// match: (Lsh32x32 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SLW)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Lsh32x32 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt32to64 y)))))
for {
@@ -3421,6 +3511,21 @@ func rewriteValuePPC64_OpLsh32x64_0(v *Value) bool {
v.AddArg(x)
return true
}
// match: (Lsh32x64 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SLW)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Lsh32x64 x (AND y (MOVDconst [31])))
// cond:
// result: (SLW x (ANDconst <typ.Int32> [31] y))
@@ -3527,6 +3632,21 @@ func rewriteValuePPC64_OpLsh32x8_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Lsh32x8 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SLW)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Lsh32x8 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt8to64 y)))))
for {
@@ -3555,6 +3675,21 @@ func rewriteValuePPC64_OpLsh64x16_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Lsh64x16 x y)
// cond: shiftIsBounded(v)
// result: (SLD x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SLD)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Lsh64x16 x y)
// cond:
// result: (SLD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt16to64 y)))))
for {
@@ -3621,6 +3756,21 @@ func rewriteValuePPC64_OpLsh64x32_0(v *Value) bool {
return true
}
// match: (Lsh64x32 x y)
// cond: shiftIsBounded(v)
// result: (SLD x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SLD)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Lsh64x32 x y)
// cond:
// result: (SLD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt32to64 y)))))
for {
@@ -3703,6 +3853,21 @@ func rewriteValuePPC64_OpLsh64x64_0(v *Value) bool {
v.AddArg(x)
return true
}
// match: (Lsh64x64 x y)
// cond: shiftIsBounded(v)
// result: (SLD x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SLD)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Lsh64x64 x (AND y (MOVDconst [63])))
// cond:
// result: (SLD x (ANDconst <typ.Int64> [63] y))
@@ -3809,6 +3974,21 @@ func rewriteValuePPC64_OpLsh64x8_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Lsh64x8 x y)
// cond: shiftIsBounded(v)
// result: (SLD x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SLD)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Lsh64x8 x y)
// cond:
// result: (SLD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt8to64 y)))))
for {
@@ -3837,6 +4017,21 @@ func rewriteValuePPC64_OpLsh8x16_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Lsh8x16 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SLW)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Lsh8x16 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt16to64 y)))))
for {
@@ -3903,6 +4098,21 @@ func rewriteValuePPC64_OpLsh8x32_0(v *Value) bool {
return true
}
// match: (Lsh8x32 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SLW)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Lsh8x32 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt32to64 y)))))
for {
@@ -3986,6 +4196,21 @@ func rewriteValuePPC64_OpLsh8x64_0(v *Value) bool {
return true
}
// match: (Lsh8x64 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SLW)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Lsh8x64 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] y))))
for {
@@ -4012,6 +4237,21 @@ func rewriteValuePPC64_OpLsh8x8_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Lsh8x8 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SLW)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Lsh8x8 x y)
// cond:
// result: (SLW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt8to64 y)))))
for {
@@ -25137,6 +25377,23 @@ func rewriteValuePPC64_OpRsh16Ux16_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh16Ux16 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVHZreg x) y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRW)
v0 := b.NewValue0(v.Pos, OpPPC64MOVHZreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
v.AddArg(y)
return true
}
// match: (Rsh16Ux16 x y)
// cond:
// result: (SRW (ZeroExt16to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt16to64 y)))))
for {
@@ -25209,6 +25466,23 @@ func rewriteValuePPC64_OpRsh16Ux32_0(v *Value) bool {
return true
}
// match: (Rsh16Ux32 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVHZreg x) y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRW)
v0 := b.NewValue0(v.Pos, OpPPC64MOVHZreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
v.AddArg(y)
return true
}
// match: (Rsh16Ux32 x y)
// cond:
// result: (SRW (ZeroExt16to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt32to64 y)))))
for {
@@ -25298,6 +25572,23 @@ func rewriteValuePPC64_OpRsh16Ux64_0(v *Value) bool {
return true
}
// match: (Rsh16Ux64 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVHZreg x) y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRW)
v0 := b.NewValue0(v.Pos, OpPPC64MOVHZreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
v.AddArg(y)
return true
}
// match: (Rsh16Ux64 x y)
// cond:
// result: (SRW (ZeroExt16to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] y))))
for {
@@ -25326,6 +25617,23 @@ func rewriteValuePPC64_OpRsh16Ux8_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh16Ux8 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVHZreg x) y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRW)
v0 := b.NewValue0(v.Pos, OpPPC64MOVHZreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
v.AddArg(y)
return true
}
// match: (Rsh16Ux8 x y)
// cond:
// result: (SRW (ZeroExt16to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt8to64 y)))))
for {
@@ -25356,18 +25664,35 @@ func rewriteValuePPC64_OpRsh16x16_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh16x16 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVHreg x) y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRAW)
v0 := b.NewValue0(v.Pos, OpPPC64MOVHreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
v.AddArg(y)
return true
}
// match: (Rsh16x16 x y)
// cond:
// result: (SRAW (SignExt16to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt16to64 y)))))
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
v.reset(OpPPC64SRAW)
v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
v0.AddArg(x)
v.AddArg(v0)
v1 := b.NewValue0(v.Pos, OpPPC64ORN, typ.Int64)
v1.AddArg(y)
v2 := b.NewValue0(v.Pos, OpPPC64MaskIfNotCarry, typ.Int64)
v3 := b.NewValue0(v.Pos, OpPPC64ADDconstForCarry, types.TypeFlags)
v3.AuxInt = -16
@@ -25428,6 +25753,23 @@ func rewriteValuePPC64_OpRsh16x32_0(v *Value) bool {
return true
}
// match: (Rsh16x32 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVHreg x) y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRAW)
v0 := b.NewValue0(v.Pos, OpPPC64MOVHreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
v.AddArg(y)
return true
}
// match: (Rsh16x32 x y)
// cond:
// result: (SRAW (SignExt16to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt32to64 y)))))
for {
@@ -25521,6 +25863,23 @@ func rewriteValuePPC64_OpRsh16x64_0(v *Value) bool {
return true
}
// match: (Rsh16x64 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVHreg x) y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRAW)
v0 := b.NewValue0(v.Pos, OpPPC64MOVHreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
v.AddArg(y)
return true
}
// match: (Rsh16x64 x y)
// cond:
// result: (SRAW (SignExt16to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] y))))
for {
@@ -25549,6 +25908,23 @@ func rewriteValuePPC64_OpRsh16x8_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh16x8 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVHreg x) y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRAW)
v0 := b.NewValue0(v.Pos, OpPPC64MOVHreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
v.AddArg(y)
return true
}
// match: (Rsh16x8 x y)
// cond:
// result: (SRAW (SignExt16to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt8to64 y)))))
for {
@@ -25579,6 +25955,21 @@ func rewriteValuePPC64_OpRsh32Ux16_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh32Ux16 x y)
// cond: shiftIsBounded(v)
// result: (SRW x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRW)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Rsh32Ux16 x y)
// cond:
// result: (SRW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt16to64 y)))))
for {
@@ -25645,6 +26036,21 @@ func rewriteValuePPC64_OpRsh32Ux32_0(v *Value) bool {
return true
}
// match: (Rsh32Ux32 x y)
// cond: shiftIsBounded(v)
// result: (SRW x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRW)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Rsh32Ux32 x y)
// cond:
// result: (SRW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt32to64 y)))))
for {
@@ -25727,6 +26133,21 @@ func rewriteValuePPC64_OpRsh32Ux64_0(v *Value) bool {
v.AddArg(x)
return true
}
// match: (Rsh32Ux64 x y)
// cond: shiftIsBounded(v)
// result: (SRW x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRW)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Rsh32Ux64 x (AND y (MOVDconst [31])))
// cond:
// result: (SRW x (ANDconst <typ.Int32> [31] y))
@@ -25951,6 +26372,13 @@ func rewriteValuePPC64_OpRsh32Ux64_0(v *Value) bool {
v.AddArg(v0)
return true
}
return false
}
func rewriteValuePPC64_OpRsh32Ux64_10(v *Value) bool {
b := v.Block
_ = b
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh32Ux64 x y)
// cond:
// result: (SRW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] y))))
@@ -25978,6 +26406,21 @@ func rewriteValuePPC64_OpRsh32Ux8_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh32Ux8 x y)
// cond: shiftIsBounded(v)
// result: (SRW x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRW)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Rsh32Ux8 x y)
// cond:
// result: (SRW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt8to64 y)))))
for {
@@ -26006,6 +26449,21 @@ func rewriteValuePPC64_OpRsh32x16_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh32x16 x y)
// cond: shiftIsBounded(v)
// result: (SRAW x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRAW)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Rsh32x16 x y)
// cond:
// result: (SRAW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt16to64 y)))))
for {
@@ -26072,6 +26530,21 @@ func rewriteValuePPC64_OpRsh32x32_0(v *Value) bool {
return true
}
// match: (Rsh32x32 x y)
// cond: shiftIsBounded(v)
// result: (SRAW x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRAW)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Rsh32x32 x y)
// cond:
// result: (SRAW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt32to64 y)))))
for {
@@ -26156,6 +26629,21 @@ func rewriteValuePPC64_OpRsh32x64_0(v *Value) bool {
v.AddArg(x)
return true
}
// match: (Rsh32x64 x y)
// cond: shiftIsBounded(v)
// result: (SRAW x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRAW)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Rsh32x64 x (AND y (MOVDconst [31])))
// cond:
// result: (SRAW x (ANDconst <typ.Int32> [31] y))
@@ -26380,6 +26868,13 @@ func rewriteValuePPC64_OpRsh32x64_0(v *Value) bool {
v.AddArg(v0)
return true
}
return false
}
func rewriteValuePPC64_OpRsh32x64_10(v *Value) bool {
b := v.Block
_ = b
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh32x64 x y)
// cond:
// result: (SRAW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] y))))
@@ -26407,6 +26902,21 @@ func rewriteValuePPC64_OpRsh32x8_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh32x8 x y)
// cond: shiftIsBounded(v)
// result: (SRAW x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRAW)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Rsh32x8 x y)
// cond:
// result: (SRAW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt8to64 y)))))
for {
@@ -26435,6 +26945,21 @@ func rewriteValuePPC64_OpRsh64Ux16_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh64Ux16 x y)
// cond: shiftIsBounded(v)
// result: (SRD x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRD)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Rsh64Ux16 x y)
// cond:
// result: (SRD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt16to64 y)))))
for {
@@ -26501,6 +27026,21 @@ func rewriteValuePPC64_OpRsh64Ux32_0(v *Value) bool {
return true
}
// match: (Rsh64Ux32 x y)
// cond: shiftIsBounded(v)
// result: (SRD x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRD)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Rsh64Ux32 x y)
// cond:
// result: (SRD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt32to64 y)))))
for {
@@ -26583,6 +27123,21 @@ func rewriteValuePPC64_OpRsh64Ux64_0(v *Value) bool {
v.AddArg(x)
return true
}
// match: (Rsh64Ux64 x y)
// cond: shiftIsBounded(v)
// result: (SRD x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRD)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Rsh64Ux64 x (AND y (MOVDconst [63])))
// cond:
// result: (SRD x (ANDconst <typ.Int64> [63] y))
@@ -26807,6 +27362,13 @@ func rewriteValuePPC64_OpRsh64Ux64_0(v *Value) bool {
v.AddArg(v0)
return true
}
return false
}
func rewriteValuePPC64_OpRsh64Ux64_10(v *Value) bool {
b := v.Block
_ = b
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh64Ux64 x y)
// cond:
// result: (SRD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] y))))
@@ -26834,6 +27396,21 @@ func rewriteValuePPC64_OpRsh64Ux8_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh64Ux8 x y)
// cond: shiftIsBounded(v)
// result: (SRD x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRD)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Rsh64Ux8 x y)
// cond:
// result: (SRD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt8to64 y)))))
for {
@@ -26862,6 +27439,21 @@ func rewriteValuePPC64_OpRsh64x16_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh64x16 x y)
// cond: shiftIsBounded(v)
// result: (SRAD x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRAD)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Rsh64x16 x y)
// cond:
// result: (SRAD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt16to64 y)))))
for {
@@ -26928,6 +27520,21 @@ func rewriteValuePPC64_OpRsh64x32_0(v *Value) bool {
return true
}
// match: (Rsh64x32 x y)
// cond: shiftIsBounded(v)
// result: (SRAD x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRAD)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Rsh64x32 x y)
// cond:
// result: (SRAD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt32to64 y)))))
for {
@@ -27012,6 +27619,21 @@ func rewriteValuePPC64_OpRsh64x64_0(v *Value) bool {
v.AddArg(x)
return true
}
// match: (Rsh64x64 x y)
// cond: shiftIsBounded(v)
// result: (SRAD x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRAD)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Rsh64x64 x (AND y (MOVDconst [63])))
// cond:
// result: (SRAD x (ANDconst <typ.Int64> [63] y))
@@ -27236,6 +27858,13 @@ func rewriteValuePPC64_OpRsh64x64_0(v *Value) bool {
v.AddArg(v0)
return true
}
return false
}
func rewriteValuePPC64_OpRsh64x64_10(v *Value) bool {
b := v.Block
_ = b
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh64x64 x y)
// cond:
// result: (SRAD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] y))))
@@ -27263,6 +27892,21 @@ func rewriteValuePPC64_OpRsh64x8_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh64x8 x y)
// cond: shiftIsBounded(v)
// result: (SRAD x y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRAD)
v.AddArg(x)
v.AddArg(y)
return true
}
// match: (Rsh64x8 x y)
// cond:
// result: (SRAD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt8to64 y)))))
for {
@@ -27291,6 +27935,23 @@ func rewriteValuePPC64_OpRsh8Ux16_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh8Ux16 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVBZreg x) y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRW)
v0 := b.NewValue0(v.Pos, OpPPC64MOVBZreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
v.AddArg(y)
return true
}
// match: (Rsh8Ux16 x y)
// cond:
// result: (SRW (ZeroExt8to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt16to64 y)))))
for {
@@ -27363,6 +28024,23 @@ func rewriteValuePPC64_OpRsh8Ux32_0(v *Value) bool {
return true
}
// match: (Rsh8Ux32 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVBZreg x) y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRW)
v0 := b.NewValue0(v.Pos, OpPPC64MOVBZreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
v.AddArg(y)
return true
}
// match: (Rsh8Ux32 x y)
// cond:
// result: (SRW (ZeroExt8to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt32to64 y)))))
for {
@@ -27452,6 +28130,23 @@ func rewriteValuePPC64_OpRsh8Ux64_0(v *Value) bool {
return true
}
// match: (Rsh8Ux64 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVBZreg x) y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRW)
v0 := b.NewValue0(v.Pos, OpPPC64MOVBZreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
v.AddArg(y)
return true
}
// match: (Rsh8Ux64 x y)
// cond:
// result: (SRW (ZeroExt8to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] y))))
for {
@@ -27480,6 +28175,23 @@ func rewriteValuePPC64_OpRsh8Ux8_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh8Ux8 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVBZreg x) y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRW)
v0 := b.NewValue0(v.Pos, OpPPC64MOVBZreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
v.AddArg(y)
return true
}
// match: (Rsh8Ux8 x y)
// cond:
// result: (SRW (ZeroExt8to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt8to64 y)))))
for {
@@ -27510,6 +28222,23 @@ func rewriteValuePPC64_OpRsh8x16_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh8x16 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVBreg x) y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRAW)
v0 := b.NewValue0(v.Pos, OpPPC64MOVBreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
v.AddArg(y)
return true
}
// match: (Rsh8x16 x y)
// cond:
// result: (SRAW (SignExt8to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt16to64 y)))))
for {
@@ -27582,6 +28311,23 @@ func rewriteValuePPC64_OpRsh8x32_0(v *Value) bool {
return true
}
// match: (Rsh8x32 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVBreg x) y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRAW)
v0 := b.NewValue0(v.Pos, OpPPC64MOVBreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
v.AddArg(y)
return true
}
// match: (Rsh8x32 x y)
// cond:
// result: (SRAW (SignExt8to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt32to64 y)))))
for {
@@ -27675,6 +28421,23 @@ func rewriteValuePPC64_OpRsh8x64_0(v *Value) bool {
return true
}
// match: (Rsh8x64 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVBreg x) y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRAW)
v0 := b.NewValue0(v.Pos, OpPPC64MOVBreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
v.AddArg(y)
return true
}
// match: (Rsh8x64 x y)
// cond:
// result: (SRAW (SignExt8to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] y))))
for {
@@ -27703,6 +28466,23 @@ func rewriteValuePPC64_OpRsh8x8_0(v *Value) bool {
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh8x8 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVBreg x) y)
for {
_ = v.Args[1]
x := v.Args[0]
y := v.Args[1]
if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRAW)
v0 := b.NewValue0(v.Pos, OpPPC64MOVBreg, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
v.AddArg(y)
return true
}
// match: (Rsh8x8 x y)
// cond:
// result: (SRAW (SignExt8to32 x) (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt8to64 y)))))
for {
...
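One way to observe the effect of this CL (a sketch under stated assumptions, not part of the commit): build a masked shift for GOARCH=ppc64le and inspect the selected instructions with go build -gcflags=-S; the bounded form should come out as a single shift instruction. In the style of the repo's test/codegen asmcheck files, such a check might look roughly like the following, where the annotation text is an illustrative assumption rather than a line from this change:

package codegen

// lshMasked is a hypothetical asmcheck-style case; the expectation
// comment mimics the ppc64le:"SLD" form those tests use.
func lshMasked(v int64, s uint64) int64 {
	// ppc64le: expect a plain SLD, not the masked fallback sequence
	return v << (s & 63)
}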