about summary refs log tree commit diff
path: root/src/cmd
diff options
context:
space:
mode:
Diffstat (limited to 'src/cmd')
-rw-r--r--  src/cmd/asm/internal/arch/ppc64.go           |    2
-rw-r--r--  src/cmd/compile/internal/ppc64/ssa.go        |   97
-rw-r--r--  src/cmd/compile/internal/ssa/gen/PPC64.rules |   56
-rw-r--r--  src/cmd/compile/internal/ssa/gen/PPC64Ops.go |    8
-rw-r--r--  src/cmd/compile/internal/ssa/opGen.go        |   31
-rw-r--r--  src/cmd/compile/internal/ssa/rewritePPC64.go | 1021
6 files changed, 1141 insertions, 74 deletions
diff --git a/src/cmd/asm/internal/arch/ppc64.go b/src/cmd/asm/internal/arch/ppc64.go
index 7e3d55b540..3139665ba5 100644
--- a/src/cmd/asm/internal/arch/ppc64.go
+++ b/src/cmd/asm/internal/arch/ppc64.go
@@ -47,7 +47,7 @@ func IsPPC64ISEL(op obj.As) bool {
// one of the CMP instructions that require special handling.
func IsPPC64CMP(op obj.As) bool {
switch op {
- case ppc64.ACMP, ppc64.ACMPU, ppc64.ACMPW, ppc64.ACMPWU:
+ case ppc64.ACMP, ppc64.ACMPU, ppc64.ACMPW, ppc64.ACMPWU, ppc64.AFCMPU:
return true
}
return false
diff --git a/src/cmd/compile/internal/ppc64/ssa.go b/src/cmd/compile/internal/ppc64/ssa.go
index cbe233f054..c45842efe6 100644
--- a/src/cmd/compile/internal/ppc64/ssa.go
+++ b/src/cmd/compile/internal/ppc64/ssa.go
@@ -15,28 +15,6 @@ import (
"strings"
)
-// iselOp encodes mapping of comparison operations onto ISEL operands
-type iselOp struct {
- cond int64
- valueIfCond int // if cond is true, the value to return (0 or 1)
-}
-
-// Input registers to ISEL used for comparison. Index 0 is zero, 1 is (will be) 1
-var iselRegs = [2]int16{ppc64.REG_R0, ppc64.REGTMP}
-
-var iselOps = map[ssa.Op]iselOp{
- ssa.OpPPC64Equal: {cond: ppc64.C_COND_EQ, valueIfCond: 1},
- ssa.OpPPC64NotEqual: {cond: ppc64.C_COND_EQ, valueIfCond: 0},
- ssa.OpPPC64LessThan: {cond: ppc64.C_COND_LT, valueIfCond: 1},
- ssa.OpPPC64GreaterEqual: {cond: ppc64.C_COND_LT, valueIfCond: 0},
- ssa.OpPPC64GreaterThan: {cond: ppc64.C_COND_GT, valueIfCond: 1},
- ssa.OpPPC64LessEqual: {cond: ppc64.C_COND_GT, valueIfCond: 0},
- ssa.OpPPC64FLessThan: {cond: ppc64.C_COND_LT, valueIfCond: 1},
- ssa.OpPPC64FGreaterThan: {cond: ppc64.C_COND_GT, valueIfCond: 1},
- ssa.OpPPC64FLessEqual: {cond: ppc64.C_COND_LT, valueIfCond: 1}, // 2 comparisons, 2nd is EQ
- ssa.OpPPC64FGreaterEqual: {cond: ppc64.C_COND_GT, valueIfCond: 1}, // 2 comparisons, 2nd is EQ
-}
-
// markMoves marks any MOVXconst ops that need to avoid clobbering flags.
func ssaMarkMoves(s *gc.SSAGenState, b *ssa.Block) {
// flive := b.FlagsLiveAtEnd
@@ -120,17 +98,6 @@ func storeByType(t *types.Type) obj.As {
panic("bad store type")
}
-func ssaGenISEL(s *gc.SSAGenState, v *ssa.Value, cr int64, r1, r2 int16) {
- r := v.Reg()
- p := s.Prog(ppc64.AISEL)
- p.To.Type = obj.TYPE_REG
- p.To.Reg = r
- p.Reg = r1
- p.SetFrom3(obj.Addr{Type: obj.TYPE_REG, Reg: r2})
- p.From.Type = obj.TYPE_CONST
- p.From.Offset = cr
-}
-
func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
switch v.Op {
case ssa.OpCopy:
@@ -843,43 +810,32 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
p.To.Reg = v.Args[0].Reg()
gc.AddAux(&p.To, v)
- case ssa.OpPPC64Equal,
- ssa.OpPPC64NotEqual,
- ssa.OpPPC64LessThan,
- ssa.OpPPC64FLessThan,
- ssa.OpPPC64LessEqual,
- ssa.OpPPC64GreaterThan,
- ssa.OpPPC64FGreaterThan,
- ssa.OpPPC64GreaterEqual:
-
- // On Power7 or later, can use isel instruction:
- // for a < b, a > b, a = b:
- // rtmp := 1
- // isel rt,rtmp,r0,cond // rt is target in ppc asm
-
- // for a >= b, a <= b, a != b:
- // rtmp := 1
- // isel rt,0,rtmp,!cond // rt is target in ppc asm
-
- p := s.Prog(ppc64.AMOVD)
- p.From.Type = obj.TYPE_CONST
- p.From.Offset = 1
+ case ssa.OpPPC64ISEL, ssa.OpPPC64ISELB:
+ // ISEL, ISELB
+ // AuxInt value indicates condition: 0=LT 1=GT 2=EQ 4=GE 5=LE 6=NE
+ // ISEL only accepts 0, 1, 2 condition values but the others can be
+ // achieved by swapping operand order.
+ // arg0 ? arg1 : arg2 with conditions LT, GT, EQ
+ // arg0 ? arg2 : arg1 for conditions GE, LE, NE
+ // ISELB is used when a boolean result is needed, returning 0 or 1
+ p := s.Prog(ppc64.AISEL)
p.To.Type = obj.TYPE_REG
- p.To.Reg = iselRegs[1]
- iop := iselOps[v.Op]
- ssaGenISEL(s, v, iop.cond, iselRegs[iop.valueIfCond], iselRegs[1-iop.valueIfCond])
-
- case ssa.OpPPC64FLessEqual, // These include a second branch for EQ -- dealing with NaN prevents REL= to !REL conversion
- ssa.OpPPC64FGreaterEqual:
-
- p := s.Prog(ppc64.AMOVD)
+ p.To.Reg = v.Reg()
+ // For ISELB, boolean result 0 or 1. Use R0 for 0 operand to avoid load.
+ r := obj.Addr{Type: obj.TYPE_REG, Reg: ppc64.REG_R0}
+ if v.Op == ssa.OpPPC64ISEL {
+ r.Reg = v.Args[1].Reg()
+ }
+ // AuxInt values 4,5,6 implemented with reverse operand order from 0,1,2
+ if v.AuxInt > 3 {
+ p.Reg = r.Reg
+ p.SetFrom3(obj.Addr{Type: obj.TYPE_REG, Reg: v.Args[0].Reg()})
+ } else {
+ p.Reg = v.Args[0].Reg()
+ p.SetFrom3(r)
+ }
p.From.Type = obj.TYPE_CONST
- p.From.Offset = 1
- p.To.Type = obj.TYPE_REG
- p.To.Reg = iselRegs[1]
- iop := iselOps[v.Op]
- ssaGenISEL(s, v, iop.cond, iselRegs[iop.valueIfCond], iselRegs[1-iop.valueIfCond])
- ssaGenISEL(s, v, ppc64.C_COND_EQ, iselRegs[1], v.Reg())
+ p.From.Offset = v.AuxInt & 3
case ssa.OpPPC64LoweredZero:
@@ -1265,6 +1221,11 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
gc.Warnl(v.Pos, "generated nil check")
}
+ // These should be resolved by rules and not make it here.
+ case ssa.OpPPC64Equal, ssa.OpPPC64NotEqual, ssa.OpPPC64LessThan, ssa.OpPPC64FLessThan,
+ ssa.OpPPC64LessEqual, ssa.OpPPC64GreaterThan, ssa.OpPPC64FGreaterThan, ssa.OpPPC64GreaterEqual,
+ ssa.OpPPC64FLessEqual, ssa.OpPPC64FGreaterEqual:
+ v.Fatalf("Pseudo-op should not make it to codegen: %s ###\n", v.LongString())
case ssa.OpPPC64InvertFlags:
v.Fatalf("InvertFlags should never make it to codegen %v", v.LongString())
case ssa.OpPPC64FlagEQ, ssa.OpPPC64FlagLT, ssa.OpPPC64FlagGT:
diff --git a/src/cmd/compile/internal/ssa/gen/PPC64.rules b/src/cmd/compile/internal/ssa/gen/PPC64.rules
index 01656df610..b247a0b99e 100644
--- a/src/cmd/compile/internal/ssa/gen/PPC64.rules
+++ b/src/cmd/compile/internal/ssa/gen/PPC64.rules
@@ -542,6 +542,9 @@
((EQ|NE|LT|LE|GT|GE) (CMPconst [0] z:(OR x y)) yes no) && z.Uses == 1 -> ((EQ|NE|LT|LE|GT|GE) (ORCC x y) yes no)
((EQ|NE|LT|LE|GT|GE) (CMPconst [0] z:(XOR x y)) yes no) && z.Uses == 1 -> ((EQ|NE|LT|LE|GT|GE) (XORCC x y) yes no)
+(CondSelect x y bool) && flagArg(bool) != nil -> (ISEL [2] x y bool)
+(CondSelect x y bool) && flagArg(bool) == nil -> (ISEL [2] x y (CMPWconst [0] bool))
+
// Lowering loads
(Load <t> ptr mem) && (is64BitInt(t) || isPtr(t)) -> (MOVDload ptr mem)
(Load <t> ptr mem) && is32BitInt(t) && isSigned(t) -> (MOVWload ptr mem)
@@ -1019,6 +1022,59 @@
(CMPWU x (MOVDconst [c])) && isU16Bit(c) -> (CMPWUconst x [c])
(CMPWU (MOVDconst [c]) y) && isU16Bit(c) -> (InvertFlags (CMPWUconst y [c]))
+// ISEL auxInt values 0=LT 1=GT 2=EQ arg2 ? arg0 : arg1
+// ISEL auxInt values 4=GE 5=LE 6=NE arg2 ? arg1 : arg0
+// ISELB special case where arg0, arg1 values are 0, 1
+
+(Equal cmp) -> (ISELB [2] (MOVDconst [1]) cmp)
+(NotEqual cmp) -> (ISELB [6] (MOVDconst [1]) cmp)
+(LessThan cmp) -> (ISELB [0] (MOVDconst [1]) cmp)
+(FLessThan cmp) -> (ISELB [0] (MOVDconst [1]) cmp)
+(FLessEqual cmp) -> (ISEL [2] (MOVDconst [1]) (ISELB [0] (MOVDconst [1]) cmp) cmp)
+(GreaterEqual cmp) -> (ISELB [4] (MOVDconst [1]) cmp)
+(GreaterThan cmp) -> (ISELB [1] (MOVDconst [1]) cmp)
+(FGreaterThan cmp) -> (ISELB [1] (MOVDconst [1]) cmp)
+(FGreaterEqual cmp) -> (ISEL [2] (MOVDconst [1]) (ISELB [1] (MOVDconst [1]) cmp) cmp)
+(LessEqual cmp) -> (ISELB [5] (MOVDconst [1]) cmp)
+
+(ISELB [0] _ (FlagLT)) -> (MOVDconst [1])
+(ISELB [0] _ (Flag(GT|EQ))) -> (MOVDconst [0])
+(ISELB [1] _ (FlagGT)) -> (MOVDconst [1])
+(ISELB [1] _ (Flag(LT|EQ))) -> (MOVDconst [0])
+(ISELB [2] _ (FlagEQ)) -> (MOVDconst [1])
+(ISELB [2] _ (Flag(LT|GT))) -> (MOVDconst [0])
+(ISELB [4] _ (FlagLT)) -> (MOVDconst [0])
+(ISELB [4] _ (Flag(GT|EQ))) -> (MOVDconst [1])
+(ISELB [5] _ (FlagGT)) -> (MOVDconst [0])
+(ISELB [5] _ (Flag(LT|EQ))) -> (MOVDconst [1])
+(ISELB [6] _ (FlagEQ)) -> (MOVDconst [0])
+(ISELB [6] _ (Flag(LT|GT))) -> (MOVDconst [1])
+
+(ISEL [2] x _ (FlagEQ)) -> x
+(ISEL [2] _ y (Flag(LT|GT))) -> y
+
+(ISEL [6] _ y (FlagEQ)) -> y
+(ISEL [6] x _ (Flag(LT|GT))) -> x
+
+(ISEL [0] _ y (Flag(EQ|GT))) -> y
+(ISEL [0] x _ (FlagLT)) -> x
+
+(ISEL [5] _ x (Flag(EQ|LT))) -> x
+(ISEL [5] y _ (FlagGT)) -> y
+
+(ISEL [1] _ y (Flag(EQ|LT))) -> y
+(ISEL [1] x _ (FlagGT)) -> x
+
+(ISEL [4] x _ (Flag(EQ|GT))) -> x
+(ISEL [4] _ y (FlagLT)) -> y
+
+(ISELB [n] (MOVDconst [1]) (InvertFlags bool)) && n%4 == 0 -> (ISELB [n+1] (MOVDconst [1]) bool)
+(ISELB [n] (MOVDconst [1]) (InvertFlags bool)) && n%4 == 1 -> (ISELB [n-1] (MOVDconst [1]) bool)
+(ISELB [n] (MOVDconst [1]) (InvertFlags bool)) && n%4 == 2 -> (ISELB [n] (MOVDconst [1]) bool)
+(ISEL [n] x y (InvertFlags bool)) && n%4 == 0 -> (ISEL [n+1] x y bool)
+(ISEL [n] x y (InvertFlags bool)) && n%4 == 1 -> (ISEL [n-1] x y bool)
+(ISEL [n] x y (InvertFlags bool)) && n%4 == 2 -> (ISEL [n] x y bool)
+
// A particular pattern seen in cgo code:
(AND (MOVDconst [c]) x:(MOVBZload _ _)) -> (ANDconst [c&0xFF] x)
(AND x:(MOVBZload _ _) (MOVDconst [c])) -> (ANDconst [c&0xFF] x)
diff --git a/src/cmd/compile/internal/ssa/gen/PPC64Ops.go b/src/cmd/compile/internal/ssa/gen/PPC64Ops.go
index af72774765..5ed8ed41f4 100644
--- a/src/cmd/compile/internal/ssa/gen/PPC64Ops.go
+++ b/src/cmd/compile/internal/ssa/gen/PPC64Ops.go
@@ -140,6 +140,8 @@ func init() {
gp1cr = regInfo{inputs: []regMask{gp | sp | sb}}
gp2cr = regInfo{inputs: []regMask{gp | sp | sb, gp | sp | sb}}
crgp = regInfo{inputs: nil, outputs: []regMask{gp}}
+ crgp11 = regInfo{inputs: []regMask{gp}, outputs: []regMask{gp}}
+ crgp21 = regInfo{inputs: []regMask{gp, gp}, outputs: []regMask{gp}}
gpload = regInfo{inputs: []regMask{gp | sp | sb}, outputs: []regMask{gp}}
gploadidx = regInfo{inputs: []regMask{gp | sp | sb, gp}, outputs: []regMask{gp}}
gpstore = regInfo{inputs: []regMask{gp | sp | sb, gp | sp | sb}}
@@ -365,6 +367,12 @@ func init() {
{name: "CMPWconst", argLength: 1, reg: gp1cr, asm: "CMPW", aux: "Int32", typ: "Flags"},
{name: "CMPWUconst", argLength: 1, reg: gp1cr, asm: "CMPWU", aux: "Int32", typ: "Flags"},
+ // ISEL auxInt values 0=LT 1=GT 2=EQ arg2 ? arg0 : arg1
+ // ISEL auxInt values 4=GE 5=LE 6=NE arg2 ? arg1 : arg0
+ // ISELB special case where arg0, arg1 values are 0, 1 for boolean result
+ {name: "ISEL", argLength: 3, reg: crgp21, asm: "ISEL", aux: "Int32", typ: "Int32"}, // see above
+ {name: "ISELB", argLength: 2, reg: crgp11, asm: "ISEL", aux: "Int32", typ: "Int32"}, // see above
+
// pseudo-ops
{name: "Equal", argLength: 1, reg: crgp}, // bool, true flags encode x==y false otherwise.
{name: "NotEqual", argLength: 1, reg: crgp}, // bool, true flags encode x!=y false otherwise.
diff --git a/src/cmd/compile/internal/ssa/opGen.go b/src/cmd/compile/internal/ssa/opGen.go
index ab3ffcbe19..69d33e3bd5 100644
--- a/src/cmd/compile/internal/ssa/opGen.go
+++ b/src/cmd/compile/internal/ssa/opGen.go
@@ -1811,6 +1811,8 @@ const (
OpPPC64CMPUconst
OpPPC64CMPWconst
OpPPC64CMPWUconst
+ OpPPC64ISEL
+ OpPPC64ISELB
OpPPC64Equal
OpPPC64NotEqual
OpPPC64LessThan
@@ -24213,6 +24215,35 @@ var opcodeTable = [...]opInfo{
},
},
{
+ name: "ISEL",
+ auxType: auxInt32,
+ argLen: 3,
+ asm: ppc64.AISEL,
+ reg: regInfo{
+ inputs: []inputInfo{
+ {0, 1073733624}, // R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R14 R15 R16 R17 R18 R19 R20 R21 R22 R23 R24 R25 R26 R27 R28 R29
+ {1, 1073733624}, // R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R14 R15 R16 R17 R18 R19 R20 R21 R22 R23 R24 R25 R26 R27 R28 R29
+ },
+ outputs: []outputInfo{
+ {0, 1073733624}, // R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R14 R15 R16 R17 R18 R19 R20 R21 R22 R23 R24 R25 R26 R27 R28 R29
+ },
+ },
+ },
+ {
+ name: "ISELB",
+ auxType: auxInt32,
+ argLen: 2,
+ asm: ppc64.AISEL,
+ reg: regInfo{
+ inputs: []inputInfo{
+ {0, 1073733624}, // R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R14 R15 R16 R17 R18 R19 R20 R21 R22 R23 R24 R25 R26 R27 R28 R29
+ },
+ outputs: []outputInfo{
+ {0, 1073733624}, // R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R14 R15 R16 R17 R18 R19 R20 R21 R22 R23 R24 R25 R26 R27 R28 R29
+ },
+ },
+ },
+ {
name: "Equal",
argLen: 1,
reg: regInfo{
diff --git a/src/cmd/compile/internal/ssa/rewritePPC64.go b/src/cmd/compile/internal/ssa/rewritePPC64.go
index 1bc16515d4..33b2bc057f 100644
--- a/src/cmd/compile/internal/ssa/rewritePPC64.go
+++ b/src/cmd/compile/internal/ssa/rewritePPC64.go
@@ -91,6 +91,8 @@ func rewriteValuePPC64(v *Value) bool {
return rewriteValuePPC64_OpCom64_0(v)
case OpCom8:
return rewriteValuePPC64_OpCom8_0(v)
+ case OpCondSelect:
+ return rewriteValuePPC64_OpCondSelect_0(v)
case OpConst16:
return rewriteValuePPC64_OpConst16_0(v)
case OpConst32:
@@ -429,6 +431,14 @@ func rewriteValuePPC64(v *Value) bool {
return rewriteValuePPC64_OpPPC64FCEIL_0(v)
case OpPPC64FFLOOR:
return rewriteValuePPC64_OpPPC64FFLOOR_0(v)
+ case OpPPC64FGreaterEqual:
+ return rewriteValuePPC64_OpPPC64FGreaterEqual_0(v)
+ case OpPPC64FGreaterThan:
+ return rewriteValuePPC64_OpPPC64FGreaterThan_0(v)
+ case OpPPC64FLessEqual:
+ return rewriteValuePPC64_OpPPC64FLessEqual_0(v)
+ case OpPPC64FLessThan:
+ return rewriteValuePPC64_OpPPC64FLessThan_0(v)
case OpPPC64FMOVDload:
return rewriteValuePPC64_OpPPC64FMOVDload_0(v)
case OpPPC64FMOVDstore:
@@ -451,6 +461,10 @@ func rewriteValuePPC64(v *Value) bool {
return rewriteValuePPC64_OpPPC64GreaterEqual_0(v)
case OpPPC64GreaterThan:
return rewriteValuePPC64_OpPPC64GreaterThan_0(v)
+ case OpPPC64ISEL:
+ return rewriteValuePPC64_OpPPC64ISEL_0(v) || rewriteValuePPC64_OpPPC64ISEL_10(v) || rewriteValuePPC64_OpPPC64ISEL_20(v)
+ case OpPPC64ISELB:
+ return rewriteValuePPC64_OpPPC64ISELB_0(v) || rewriteValuePPC64_OpPPC64ISELB_10(v) || rewriteValuePPC64_OpPPC64ISELB_20(v)
case OpPPC64LessEqual:
return rewriteValuePPC64_OpPPC64LessEqual_0(v)
case OpPPC64LessThan:
@@ -1296,6 +1310,47 @@ func rewriteValuePPC64_OpCom8_0(v *Value) bool {
return true
}
}
+func rewriteValuePPC64_OpCondSelect_0(v *Value) bool {
+ b := v.Block
+ // match: (CondSelect x y bool)
+ // cond: flagArg(bool) != nil
+ // result: (ISEL [2] x y bool)
+ for {
+ bool := v.Args[2]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(flagArg(bool) != nil) {
+ break
+ }
+ v.reset(OpPPC64ISEL)
+ v.AuxInt = 2
+ v.AddArg(x)
+ v.AddArg(y)
+ v.AddArg(bool)
+ return true
+ }
+ // match: (CondSelect x y bool)
+ // cond: flagArg(bool) == nil
+ // result: (ISEL [2] x y (CMPWconst [0] bool))
+ for {
+ bool := v.Args[2]
+ x := v.Args[0]
+ y := v.Args[1]
+ if !(flagArg(bool) == nil) {
+ break
+ }
+ v.reset(OpPPC64ISEL)
+ v.AuxInt = 2
+ v.AddArg(x)
+ v.AddArg(y)
+ v0 := b.NewValue0(v.Pos, OpPPC64CMPWconst, types.TypeFlags)
+ v0.AuxInt = 0
+ v0.AddArg(bool)
+ v.AddArg(v0)
+ return true
+ }
+ return false
+}
func rewriteValuePPC64_OpConst16_0(v *Value) bool {
// match: (Const16 [val])
// cond:
@@ -6441,6 +6496,8 @@ func rewriteValuePPC64_OpPPC64CMPconst_0(v *Value) bool {
return false
}
func rewriteValuePPC64_OpPPC64Equal_0(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
// match: (Equal (FlagEQ))
// cond:
// result: (MOVDconst [1])
@@ -6490,7 +6547,19 @@ func rewriteValuePPC64_OpPPC64Equal_0(v *Value) bool {
v.AddArg(x)
return true
}
- return false
+ // match: (Equal cmp)
+ // cond:
+ // result: (ISELB [2] (MOVDconst [1]) cmp)
+ for {
+ cmp := v.Args[0]
+ v.reset(OpPPC64ISELB)
+ v.AuxInt = 2
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v0.AuxInt = 1
+ v.AddArg(v0)
+ v.AddArg(cmp)
+ return true
+ }
}
func rewriteValuePPC64_OpPPC64FABS_0(v *Value) bool {
// match: (FABS (FMOVDconst [x]))
@@ -6616,6 +6685,88 @@ func rewriteValuePPC64_OpPPC64FFLOOR_0(v *Value) bool {
}
return false
}
+func rewriteValuePPC64_OpPPC64FGreaterEqual_0(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (FGreaterEqual cmp)
+ // cond:
+ // result: (ISEL [2] (MOVDconst [1]) (ISELB [1] (MOVDconst [1]) cmp) cmp)
+ for {
+ cmp := v.Args[0]
+ v.reset(OpPPC64ISEL)
+ v.AuxInt = 2
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v0.AuxInt = 1
+ v.AddArg(v0)
+ v1 := b.NewValue0(v.Pos, OpPPC64ISELB, typ.Int32)
+ v1.AuxInt = 1
+ v2 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v2.AuxInt = 1
+ v1.AddArg(v2)
+ v1.AddArg(cmp)
+ v.AddArg(v1)
+ v.AddArg(cmp)
+ return true
+ }
+}
+func rewriteValuePPC64_OpPPC64FGreaterThan_0(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (FGreaterThan cmp)
+ // cond:
+ // result: (ISELB [1] (MOVDconst [1]) cmp)
+ for {
+ cmp := v.Args[0]
+ v.reset(OpPPC64ISELB)
+ v.AuxInt = 1
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v0.AuxInt = 1
+ v.AddArg(v0)
+ v.AddArg(cmp)
+ return true
+ }
+}
+func rewriteValuePPC64_OpPPC64FLessEqual_0(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (FLessEqual cmp)
+ // cond:
+ // result: (ISEL [2] (MOVDconst [1]) (ISELB [0] (MOVDconst [1]) cmp) cmp)
+ for {
+ cmp := v.Args[0]
+ v.reset(OpPPC64ISEL)
+ v.AuxInt = 2
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v0.AuxInt = 1
+ v.AddArg(v0)
+ v1 := b.NewValue0(v.Pos, OpPPC64ISELB, typ.Int32)
+ v1.AuxInt = 0
+ v2 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v2.AuxInt = 1
+ v1.AddArg(v2)
+ v1.AddArg(cmp)
+ v.AddArg(v1)
+ v.AddArg(cmp)
+ return true
+ }
+}
+func rewriteValuePPC64_OpPPC64FLessThan_0(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (FLessThan cmp)
+ // cond:
+ // result: (ISELB [0] (MOVDconst [1]) cmp)
+ for {
+ cmp := v.Args[0]
+ v.reset(OpPPC64ISELB)
+ v.AuxInt = 0
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v0.AuxInt = 1
+ v.AddArg(v0)
+ v.AddArg(cmp)
+ return true
+ }
+}
func rewriteValuePPC64_OpPPC64FMOVDload_0(v *Value) bool {
// match: (FMOVDload [off] {sym} ptr (MOVDstore [off] {sym} ptr x _))
// cond:
@@ -6974,6 +7125,8 @@ func rewriteValuePPC64_OpPPC64FTRUNC_0(v *Value) bool {
return false
}
func rewriteValuePPC64_OpPPC64GreaterEqual_0(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
// match: (GreaterEqual (FlagEQ))
// cond:
// result: (MOVDconst [1])
@@ -7023,9 +7176,23 @@ func rewriteValuePPC64_OpPPC64GreaterEqual_0(v *Value) bool {
v.AddArg(x)
return true
}
- return false
+ // match: (GreaterEqual cmp)
+ // cond:
+ // result: (ISELB [4] (MOVDconst [1]) cmp)
+ for {
+ cmp := v.Args[0]
+ v.reset(OpPPC64ISELB)
+ v.AuxInt = 4
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v0.AuxInt = 1
+ v.AddArg(v0)
+ v.AddArg(cmp)
+ return true
+ }
}
func rewriteValuePPC64_OpPPC64GreaterThan_0(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
// match: (GreaterThan (FlagEQ))
// cond:
// result: (MOVDconst [0])
@@ -7075,9 +7242,813 @@ func rewriteValuePPC64_OpPPC64GreaterThan_0(v *Value) bool {
v.AddArg(x)
return true
}
+ // match: (GreaterThan cmp)
+ // cond:
+ // result: (ISELB [1] (MOVDconst [1]) cmp)
+ for {
+ cmp := v.Args[0]
+ v.reset(OpPPC64ISELB)
+ v.AuxInt = 1
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v0.AuxInt = 1
+ v.AddArg(v0)
+ v.AddArg(cmp)
+ return true
+ }
+}
+func rewriteValuePPC64_OpPPC64ISEL_0(v *Value) bool {
+ // match: (ISEL [2] x _ (FlagEQ))
+ // cond:
+ // result: x
+ for {
+ if v.AuxInt != 2 {
+ break
+ }
+ _ = v.Args[2]
+ x := v.Args[0]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64FlagEQ {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
+ return true
+ }
+ // match: (ISEL [2] _ y (FlagLT))
+ // cond:
+ // result: y
+ for {
+ if v.AuxInt != 2 {
+ break
+ }
+ _ = v.Args[2]
+ y := v.Args[1]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64FlagLT {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (ISEL [2] _ y (FlagGT))
+ // cond:
+ // result: y
+ for {
+ if v.AuxInt != 2 {
+ break
+ }
+ _ = v.Args[2]
+ y := v.Args[1]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64FlagGT {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (ISEL [6] _ y (FlagEQ))
+ // cond:
+ // result: y
+ for {
+ if v.AuxInt != 6 {
+ break
+ }
+ _ = v.Args[2]
+ y := v.Args[1]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64FlagEQ {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (ISEL [6] x _ (FlagLT))
+ // cond:
+ // result: x
+ for {
+ if v.AuxInt != 6 {
+ break
+ }
+ _ = v.Args[2]
+ x := v.Args[0]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64FlagLT {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
+ return true
+ }
+ // match: (ISEL [6] x _ (FlagGT))
+ // cond:
+ // result: x
+ for {
+ if v.AuxInt != 6 {
+ break
+ }
+ _ = v.Args[2]
+ x := v.Args[0]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64FlagGT {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
+ return true
+ }
+ // match: (ISEL [0] _ y (FlagEQ))
+ // cond:
+ // result: y
+ for {
+ if v.AuxInt != 0 {
+ break
+ }
+ _ = v.Args[2]
+ y := v.Args[1]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64FlagEQ {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (ISEL [0] _ y (FlagGT))
+ // cond:
+ // result: y
+ for {
+ if v.AuxInt != 0 {
+ break
+ }
+ _ = v.Args[2]
+ y := v.Args[1]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64FlagGT {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (ISEL [0] x _ (FlagLT))
+ // cond:
+ // result: x
+ for {
+ if v.AuxInt != 0 {
+ break
+ }
+ _ = v.Args[2]
+ x := v.Args[0]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64FlagLT {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
+ return true
+ }
+ // match: (ISEL [5] _ x (FlagEQ))
+ // cond:
+ // result: x
+ for {
+ if v.AuxInt != 5 {
+ break
+ }
+ _ = v.Args[2]
+ x := v.Args[1]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64FlagEQ {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
+ return true
+ }
+ return false
+}
+func rewriteValuePPC64_OpPPC64ISEL_10(v *Value) bool {
+ // match: (ISEL [5] _ x (FlagLT))
+ // cond:
+ // result: x
+ for {
+ if v.AuxInt != 5 {
+ break
+ }
+ _ = v.Args[2]
+ x := v.Args[1]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64FlagLT {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
+ return true
+ }
+ // match: (ISEL [5] y _ (FlagGT))
+ // cond:
+ // result: y
+ for {
+ if v.AuxInt != 5 {
+ break
+ }
+ _ = v.Args[2]
+ y := v.Args[0]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64FlagGT {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (ISEL [1] _ y (FlagEQ))
+ // cond:
+ // result: y
+ for {
+ if v.AuxInt != 1 {
+ break
+ }
+ _ = v.Args[2]
+ y := v.Args[1]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64FlagEQ {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (ISEL [1] _ y (FlagLT))
+ // cond:
+ // result: y
+ for {
+ if v.AuxInt != 1 {
+ break
+ }
+ _ = v.Args[2]
+ y := v.Args[1]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64FlagLT {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (ISEL [1] x _ (FlagGT))
+ // cond:
+ // result: x
+ for {
+ if v.AuxInt != 1 {
+ break
+ }
+ _ = v.Args[2]
+ x := v.Args[0]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64FlagGT {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
+ return true
+ }
+ // match: (ISEL [4] x _ (FlagEQ))
+ // cond:
+ // result: x
+ for {
+ if v.AuxInt != 4 {
+ break
+ }
+ _ = v.Args[2]
+ x := v.Args[0]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64FlagEQ {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
+ return true
+ }
+ // match: (ISEL [4] x _ (FlagGT))
+ // cond:
+ // result: x
+ for {
+ if v.AuxInt != 4 {
+ break
+ }
+ _ = v.Args[2]
+ x := v.Args[0]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64FlagGT {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
+ return true
+ }
+ // match: (ISEL [4] _ y (FlagLT))
+ // cond:
+ // result: y
+ for {
+ if v.AuxInt != 4 {
+ break
+ }
+ _ = v.Args[2]
+ y := v.Args[1]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64FlagLT {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (ISEL [n] x y (InvertFlags bool))
+ // cond: n%4 == 0
+ // result: (ISEL [n+1] x y bool)
+ for {
+ n := v.AuxInt
+ _ = v.Args[2]
+ x := v.Args[0]
+ y := v.Args[1]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64InvertFlags {
+ break
+ }
+ bool := v_2.Args[0]
+ if !(n%4 == 0) {
+ break
+ }
+ v.reset(OpPPC64ISEL)
+ v.AuxInt = n + 1
+ v.AddArg(x)
+ v.AddArg(y)
+ v.AddArg(bool)
+ return true
+ }
+ // match: (ISEL [n] x y (InvertFlags bool))
+ // cond: n%4 == 1
+ // result: (ISEL [n-1] x y bool)
+ for {
+ n := v.AuxInt
+ _ = v.Args[2]
+ x := v.Args[0]
+ y := v.Args[1]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64InvertFlags {
+ break
+ }
+ bool := v_2.Args[0]
+ if !(n%4 == 1) {
+ break
+ }
+ v.reset(OpPPC64ISEL)
+ v.AuxInt = n - 1
+ v.AddArg(x)
+ v.AddArg(y)
+ v.AddArg(bool)
+ return true
+ }
+ return false
+}
+func rewriteValuePPC64_OpPPC64ISEL_20(v *Value) bool {
+ // match: (ISEL [n] x y (InvertFlags bool))
+ // cond: n%4 == 2
+ // result: (ISEL [n] x y bool)
+ for {
+ n := v.AuxInt
+ _ = v.Args[2]
+ x := v.Args[0]
+ y := v.Args[1]
+ v_2 := v.Args[2]
+ if v_2.Op != OpPPC64InvertFlags {
+ break
+ }
+ bool := v_2.Args[0]
+ if !(n%4 == 2) {
+ break
+ }
+ v.reset(OpPPC64ISEL)
+ v.AuxInt = n
+ v.AddArg(x)
+ v.AddArg(y)
+ v.AddArg(bool)
+ return true
+ }
+ return false
+}
+func rewriteValuePPC64_OpPPC64ISELB_0(v *Value) bool {
+ // match: (ISELB [0] _ (FlagLT))
+ // cond:
+ // result: (MOVDconst [1])
+ for {
+ if v.AuxInt != 0 {
+ break
+ }
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64FlagLT {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 1
+ return true
+ }
+ // match: (ISELB [0] _ (FlagGT))
+ // cond:
+ // result: (MOVDconst [0])
+ for {
+ if v.AuxInt != 0 {
+ break
+ }
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64FlagGT {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 0
+ return true
+ }
+ // match: (ISELB [0] _ (FlagEQ))
+ // cond:
+ // result: (MOVDconst [0])
+ for {
+ if v.AuxInt != 0 {
+ break
+ }
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64FlagEQ {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 0
+ return true
+ }
+ // match: (ISELB [1] _ (FlagGT))
+ // cond:
+ // result: (MOVDconst [1])
+ for {
+ if v.AuxInt != 1 {
+ break
+ }
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64FlagGT {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 1
+ return true
+ }
+ // match: (ISELB [1] _ (FlagLT))
+ // cond:
+ // result: (MOVDconst [0])
+ for {
+ if v.AuxInt != 1 {
+ break
+ }
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64FlagLT {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 0
+ return true
+ }
+ // match: (ISELB [1] _ (FlagEQ))
+ // cond:
+ // result: (MOVDconst [0])
+ for {
+ if v.AuxInt != 1 {
+ break
+ }
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64FlagEQ {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 0
+ return true
+ }
+ // match: (ISELB [2] _ (FlagEQ))
+ // cond:
+ // result: (MOVDconst [1])
+ for {
+ if v.AuxInt != 2 {
+ break
+ }
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64FlagEQ {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 1
+ return true
+ }
+ // match: (ISELB [2] _ (FlagLT))
+ // cond:
+ // result: (MOVDconst [0])
+ for {
+ if v.AuxInt != 2 {
+ break
+ }
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64FlagLT {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 0
+ return true
+ }
+ // match: (ISELB [2] _ (FlagGT))
+ // cond:
+ // result: (MOVDconst [0])
+ for {
+ if v.AuxInt != 2 {
+ break
+ }
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64FlagGT {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 0
+ return true
+ }
+ // match: (ISELB [4] _ (FlagLT))
+ // cond:
+ // result: (MOVDconst [0])
+ for {
+ if v.AuxInt != 4 {
+ break
+ }
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64FlagLT {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 0
+ return true
+ }
+ return false
+}
+func rewriteValuePPC64_OpPPC64ISELB_10(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (ISELB [4] _ (FlagGT))
+ // cond:
+ // result: (MOVDconst [1])
+ for {
+ if v.AuxInt != 4 {
+ break
+ }
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64FlagGT {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 1
+ return true
+ }
+ // match: (ISELB [4] _ (FlagEQ))
+ // cond:
+ // result: (MOVDconst [1])
+ for {
+ if v.AuxInt != 4 {
+ break
+ }
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64FlagEQ {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 1
+ return true
+ }
+ // match: (ISELB [5] _ (FlagGT))
+ // cond:
+ // result: (MOVDconst [0])
+ for {
+ if v.AuxInt != 5 {
+ break
+ }
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64FlagGT {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 0
+ return true
+ }
+ // match: (ISELB [5] _ (FlagLT))
+ // cond:
+ // result: (MOVDconst [1])
+ for {
+ if v.AuxInt != 5 {
+ break
+ }
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64FlagLT {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 1
+ return true
+ }
+ // match: (ISELB [5] _ (FlagEQ))
+ // cond:
+ // result: (MOVDconst [1])
+ for {
+ if v.AuxInt != 5 {
+ break
+ }
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64FlagEQ {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 1
+ return true
+ }
+ // match: (ISELB [6] _ (FlagEQ))
+ // cond:
+ // result: (MOVDconst [0])
+ for {
+ if v.AuxInt != 6 {
+ break
+ }
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64FlagEQ {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 0
+ return true
+ }
+ // match: (ISELB [6] _ (FlagLT))
+ // cond:
+ // result: (MOVDconst [1])
+ for {
+ if v.AuxInt != 6 {
+ break
+ }
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64FlagLT {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 1
+ return true
+ }
+ // match: (ISELB [6] _ (FlagGT))
+ // cond:
+ // result: (MOVDconst [1])
+ for {
+ if v.AuxInt != 6 {
+ break
+ }
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64FlagGT {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 1
+ return true
+ }
+ // match: (ISELB [n] (MOVDconst [1]) (InvertFlags bool))
+ // cond: n%4 == 0
+ // result: (ISELB [n+1] (MOVDconst [1]) bool)
+ for {
+ n := v.AuxInt
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_0.AuxInt != 1 {
+ break
+ }
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64InvertFlags {
+ break
+ }
+ bool := v_1.Args[0]
+ if !(n%4 == 0) {
+ break
+ }
+ v.reset(OpPPC64ISELB)
+ v.AuxInt = n + 1
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v0.AuxInt = 1
+ v.AddArg(v0)
+ v.AddArg(bool)
+ return true
+ }
+ // match: (ISELB [n] (MOVDconst [1]) (InvertFlags bool))
+ // cond: n%4 == 1
+ // result: (ISELB [n-1] (MOVDconst [1]) bool)
+ for {
+ n := v.AuxInt
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_0.AuxInt != 1 {
+ break
+ }
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64InvertFlags {
+ break
+ }
+ bool := v_1.Args[0]
+ if !(n%4 == 1) {
+ break
+ }
+ v.reset(OpPPC64ISELB)
+ v.AuxInt = n - 1
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v0.AuxInt = 1
+ v.AddArg(v0)
+ v.AddArg(bool)
+ return true
+ }
+ return false
+}
+func rewriteValuePPC64_OpPPC64ISELB_20(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (ISELB [n] (MOVDconst [1]) (InvertFlags bool))
+ // cond: n%4 == 2
+ // result: (ISELB [n] (MOVDconst [1]) bool)
+ for {
+ n := v.AuxInt
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ if v_0.AuxInt != 1 {
+ break
+ }
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64InvertFlags {
+ break
+ }
+ bool := v_1.Args[0]
+ if !(n%4 == 2) {
+ break
+ }
+ v.reset(OpPPC64ISELB)
+ v.AuxInt = n
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v0.AuxInt = 1
+ v.AddArg(v0)
+ v.AddArg(bool)
+ return true
+ }
return false
}
func rewriteValuePPC64_OpPPC64LessEqual_0(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
// match: (LessEqual (FlagEQ))
// cond:
// result: (MOVDconst [1])
@@ -7127,9 +8098,23 @@ func rewriteValuePPC64_OpPPC64LessEqual_0(v *Value) bool {
v.AddArg(x)
return true
}
- return false
+ // match: (LessEqual cmp)
+ // cond:
+ // result: (ISELB [5] (MOVDconst [1]) cmp)
+ for {
+ cmp := v.Args[0]
+ v.reset(OpPPC64ISELB)
+ v.AuxInt = 5
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v0.AuxInt = 1
+ v.AddArg(v0)
+ v.AddArg(cmp)
+ return true
+ }
}
func rewriteValuePPC64_OpPPC64LessThan_0(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
// match: (LessThan (FlagEQ))
// cond:
// result: (MOVDconst [0])
@@ -7179,7 +8164,19 @@ func rewriteValuePPC64_OpPPC64LessThan_0(v *Value) bool {
v.AddArg(x)
return true
}
- return false
+ // match: (LessThan cmp)
+ // cond:
+ // result: (ISELB [0] (MOVDconst [1]) cmp)
+ for {
+ cmp := v.Args[0]
+ v.reset(OpPPC64ISELB)
+ v.AuxInt = 0
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v0.AuxInt = 1
+ v.AddArg(v0)
+ v.AddArg(cmp)
+ return true
+ }
}
func rewriteValuePPC64_OpPPC64MFVSRD_0(v *Value) bool {
b := v.Block
@@ -12030,6 +13027,8 @@ func rewriteValuePPC64_OpPPC64MaskIfNotCarry_0(v *Value) bool {
return false
}
func rewriteValuePPC64_OpPPC64NotEqual_0(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
// match: (NotEqual (FlagEQ))
// cond:
// result: (MOVDconst [0])
@@ -12079,7 +13078,19 @@ func rewriteValuePPC64_OpPPC64NotEqual_0(v *Value) bool {
v.AddArg(x)
return true
}
- return false
+ // match: (NotEqual cmp)
+ // cond:
+ // result: (ISELB [6] (MOVDconst [1]) cmp)
+ for {
+ cmp := v.Args[0]
+ v.reset(OpPPC64ISELB)
+ v.AuxInt = 6
+ v0 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v0.AuxInt = 1
+ v.AddArg(v0)
+ v.AddArg(cmp)
+ return true
+ }
}
func rewriteValuePPC64_OpPPC64OR_0(v *Value) bool {
b := v.Block