diff options
Diffstat (limited to 'src/cmd/compile/internal/ssa/rewritegeneric.go')
| -rw-r--r-- | src/cmd/compile/internal/ssa/rewritegeneric.go | 7936 |
1 files changed, 7936 insertions, 0 deletions
diff --git a/src/cmd/compile/internal/ssa/rewritegeneric.go b/src/cmd/compile/internal/ssa/rewritegeneric.go new file mode 100644 index 0000000000..ad2abc5601 --- /dev/null +++ b/src/cmd/compile/internal/ssa/rewritegeneric.go @@ -0,0 +1,7936 @@ +// autogenerated from gen/generic.rules: do not edit! +// generated with: cd gen; go run *.go + +package ssa + +import "math" + +var _ = math.MinInt8 // in case not otherwise used +func rewriteValuegeneric(v *Value, config *Config) bool { + switch v.Op { + case OpAdd16: + return rewriteValuegeneric_OpAdd16(v, config) + case OpAdd32: + return rewriteValuegeneric_OpAdd32(v, config) + case OpAdd64: + return rewriteValuegeneric_OpAdd64(v, config) + case OpAdd8: + return rewriteValuegeneric_OpAdd8(v, config) + case OpAnd16: + return rewriteValuegeneric_OpAnd16(v, config) + case OpAnd32: + return rewriteValuegeneric_OpAnd32(v, config) + case OpAnd64: + return rewriteValuegeneric_OpAnd64(v, config) + case OpAnd8: + return rewriteValuegeneric_OpAnd8(v, config) + case OpArg: + return rewriteValuegeneric_OpArg(v, config) + case OpArrayIndex: + return rewriteValuegeneric_OpArrayIndex(v, config) + case OpCom16: + return rewriteValuegeneric_OpCom16(v, config) + case OpCom32: + return rewriteValuegeneric_OpCom32(v, config) + case OpCom64: + return rewriteValuegeneric_OpCom64(v, config) + case OpCom8: + return rewriteValuegeneric_OpCom8(v, config) + case OpComplexImag: + return rewriteValuegeneric_OpComplexImag(v, config) + case OpComplexReal: + return rewriteValuegeneric_OpComplexReal(v, config) + case OpConstInterface: + return rewriteValuegeneric_OpConstInterface(v, config) + case OpConstSlice: + return rewriteValuegeneric_OpConstSlice(v, config) + case OpConstString: + return rewriteValuegeneric_OpConstString(v, config) + case OpConvert: + return rewriteValuegeneric_OpConvert(v, config) + case OpDiv64: + return rewriteValuegeneric_OpDiv64(v, config) + case OpDiv64u: + return rewriteValuegeneric_OpDiv64u(v, config) + case OpEq16: + 
return rewriteValuegeneric_OpEq16(v, config) + case OpEq32: + return rewriteValuegeneric_OpEq32(v, config) + case OpEq64: + return rewriteValuegeneric_OpEq64(v, config) + case OpEq8: + return rewriteValuegeneric_OpEq8(v, config) + case OpEqInter: + return rewriteValuegeneric_OpEqInter(v, config) + case OpEqPtr: + return rewriteValuegeneric_OpEqPtr(v, config) + case OpEqSlice: + return rewriteValuegeneric_OpEqSlice(v, config) + case OpGeq16: + return rewriteValuegeneric_OpGeq16(v, config) + case OpGeq16U: + return rewriteValuegeneric_OpGeq16U(v, config) + case OpGeq32: + return rewriteValuegeneric_OpGeq32(v, config) + case OpGeq32U: + return rewriteValuegeneric_OpGeq32U(v, config) + case OpGeq64: + return rewriteValuegeneric_OpGeq64(v, config) + case OpGeq64U: + return rewriteValuegeneric_OpGeq64U(v, config) + case OpGeq8: + return rewriteValuegeneric_OpGeq8(v, config) + case OpGeq8U: + return rewriteValuegeneric_OpGeq8U(v, config) + case OpGreater16: + return rewriteValuegeneric_OpGreater16(v, config) + case OpGreater16U: + return rewriteValuegeneric_OpGreater16U(v, config) + case OpGreater32: + return rewriteValuegeneric_OpGreater32(v, config) + case OpGreater32U: + return rewriteValuegeneric_OpGreater32U(v, config) + case OpGreater64: + return rewriteValuegeneric_OpGreater64(v, config) + case OpGreater64U: + return rewriteValuegeneric_OpGreater64U(v, config) + case OpGreater8: + return rewriteValuegeneric_OpGreater8(v, config) + case OpGreater8U: + return rewriteValuegeneric_OpGreater8U(v, config) + case OpIData: + return rewriteValuegeneric_OpIData(v, config) + case OpITab: + return rewriteValuegeneric_OpITab(v, config) + case OpIsInBounds: + return rewriteValuegeneric_OpIsInBounds(v, config) + case OpIsSliceInBounds: + return rewriteValuegeneric_OpIsSliceInBounds(v, config) + case OpLeq16: + return rewriteValuegeneric_OpLeq16(v, config) + case OpLeq16U: + return rewriteValuegeneric_OpLeq16U(v, config) + case OpLeq32: + return rewriteValuegeneric_OpLeq32(v, 
config) + case OpLeq32U: + return rewriteValuegeneric_OpLeq32U(v, config) + case OpLeq64: + return rewriteValuegeneric_OpLeq64(v, config) + case OpLeq64U: + return rewriteValuegeneric_OpLeq64U(v, config) + case OpLeq8: + return rewriteValuegeneric_OpLeq8(v, config) + case OpLeq8U: + return rewriteValuegeneric_OpLeq8U(v, config) + case OpLess16: + return rewriteValuegeneric_OpLess16(v, config) + case OpLess16U: + return rewriteValuegeneric_OpLess16U(v, config) + case OpLess32: + return rewriteValuegeneric_OpLess32(v, config) + case OpLess32U: + return rewriteValuegeneric_OpLess32U(v, config) + case OpLess64: + return rewriteValuegeneric_OpLess64(v, config) + case OpLess64U: + return rewriteValuegeneric_OpLess64U(v, config) + case OpLess8: + return rewriteValuegeneric_OpLess8(v, config) + case OpLess8U: + return rewriteValuegeneric_OpLess8U(v, config) + case OpLoad: + return rewriteValuegeneric_OpLoad(v, config) + case OpLsh16x16: + return rewriteValuegeneric_OpLsh16x16(v, config) + case OpLsh16x32: + return rewriteValuegeneric_OpLsh16x32(v, config) + case OpLsh16x64: + return rewriteValuegeneric_OpLsh16x64(v, config) + case OpLsh16x8: + return rewriteValuegeneric_OpLsh16x8(v, config) + case OpLsh32x16: + return rewriteValuegeneric_OpLsh32x16(v, config) + case OpLsh32x32: + return rewriteValuegeneric_OpLsh32x32(v, config) + case OpLsh32x64: + return rewriteValuegeneric_OpLsh32x64(v, config) + case OpLsh32x8: + return rewriteValuegeneric_OpLsh32x8(v, config) + case OpLsh64x16: + return rewriteValuegeneric_OpLsh64x16(v, config) + case OpLsh64x32: + return rewriteValuegeneric_OpLsh64x32(v, config) + case OpLsh64x64: + return rewriteValuegeneric_OpLsh64x64(v, config) + case OpLsh64x8: + return rewriteValuegeneric_OpLsh64x8(v, config) + case OpLsh8x16: + return rewriteValuegeneric_OpLsh8x16(v, config) + case OpLsh8x32: + return rewriteValuegeneric_OpLsh8x32(v, config) + case OpLsh8x64: + return rewriteValuegeneric_OpLsh8x64(v, config) + case OpLsh8x8: + return 
rewriteValuegeneric_OpLsh8x8(v, config) + case OpMod64: + return rewriteValuegeneric_OpMod64(v, config) + case OpMod64u: + return rewriteValuegeneric_OpMod64u(v, config) + case OpMul16: + return rewriteValuegeneric_OpMul16(v, config) + case OpMul32: + return rewriteValuegeneric_OpMul32(v, config) + case OpMul64: + return rewriteValuegeneric_OpMul64(v, config) + case OpMul8: + return rewriteValuegeneric_OpMul8(v, config) + case OpNeg16: + return rewriteValuegeneric_OpNeg16(v, config) + case OpNeg32: + return rewriteValuegeneric_OpNeg32(v, config) + case OpNeg64: + return rewriteValuegeneric_OpNeg64(v, config) + case OpNeg8: + return rewriteValuegeneric_OpNeg8(v, config) + case OpNeq16: + return rewriteValuegeneric_OpNeq16(v, config) + case OpNeq32: + return rewriteValuegeneric_OpNeq32(v, config) + case OpNeq64: + return rewriteValuegeneric_OpNeq64(v, config) + case OpNeq8: + return rewriteValuegeneric_OpNeq8(v, config) + case OpNeqInter: + return rewriteValuegeneric_OpNeqInter(v, config) + case OpNeqPtr: + return rewriteValuegeneric_OpNeqPtr(v, config) + case OpNeqSlice: + return rewriteValuegeneric_OpNeqSlice(v, config) + case OpOr16: + return rewriteValuegeneric_OpOr16(v, config) + case OpOr32: + return rewriteValuegeneric_OpOr32(v, config) + case OpOr64: + return rewriteValuegeneric_OpOr64(v, config) + case OpOr8: + return rewriteValuegeneric_OpOr8(v, config) + case OpPhi: + return rewriteValuegeneric_OpPhi(v, config) + case OpPtrIndex: + return rewriteValuegeneric_OpPtrIndex(v, config) + case OpRsh16Ux16: + return rewriteValuegeneric_OpRsh16Ux16(v, config) + case OpRsh16Ux32: + return rewriteValuegeneric_OpRsh16Ux32(v, config) + case OpRsh16Ux64: + return rewriteValuegeneric_OpRsh16Ux64(v, config) + case OpRsh16Ux8: + return rewriteValuegeneric_OpRsh16Ux8(v, config) + case OpRsh16x16: + return rewriteValuegeneric_OpRsh16x16(v, config) + case OpRsh16x32: + return rewriteValuegeneric_OpRsh16x32(v, config) + case OpRsh16x64: + return 
rewriteValuegeneric_OpRsh16x64(v, config) + case OpRsh16x8: + return rewriteValuegeneric_OpRsh16x8(v, config) + case OpRsh32Ux16: + return rewriteValuegeneric_OpRsh32Ux16(v, config) + case OpRsh32Ux32: + return rewriteValuegeneric_OpRsh32Ux32(v, config) + case OpRsh32Ux64: + return rewriteValuegeneric_OpRsh32Ux64(v, config) + case OpRsh32Ux8: + return rewriteValuegeneric_OpRsh32Ux8(v, config) + case OpRsh32x16: + return rewriteValuegeneric_OpRsh32x16(v, config) + case OpRsh32x32: + return rewriteValuegeneric_OpRsh32x32(v, config) + case OpRsh32x64: + return rewriteValuegeneric_OpRsh32x64(v, config) + case OpRsh32x8: + return rewriteValuegeneric_OpRsh32x8(v, config) + case OpRsh64Ux16: + return rewriteValuegeneric_OpRsh64Ux16(v, config) + case OpRsh64Ux32: + return rewriteValuegeneric_OpRsh64Ux32(v, config) + case OpRsh64Ux64: + return rewriteValuegeneric_OpRsh64Ux64(v, config) + case OpRsh64Ux8: + return rewriteValuegeneric_OpRsh64Ux8(v, config) + case OpRsh64x16: + return rewriteValuegeneric_OpRsh64x16(v, config) + case OpRsh64x32: + return rewriteValuegeneric_OpRsh64x32(v, config) + case OpRsh64x64: + return rewriteValuegeneric_OpRsh64x64(v, config) + case OpRsh64x8: + return rewriteValuegeneric_OpRsh64x8(v, config) + case OpRsh8Ux16: + return rewriteValuegeneric_OpRsh8Ux16(v, config) + case OpRsh8Ux32: + return rewriteValuegeneric_OpRsh8Ux32(v, config) + case OpRsh8Ux64: + return rewriteValuegeneric_OpRsh8Ux64(v, config) + case OpRsh8Ux8: + return rewriteValuegeneric_OpRsh8Ux8(v, config) + case OpRsh8x16: + return rewriteValuegeneric_OpRsh8x16(v, config) + case OpRsh8x32: + return rewriteValuegeneric_OpRsh8x32(v, config) + case OpRsh8x64: + return rewriteValuegeneric_OpRsh8x64(v, config) + case OpRsh8x8: + return rewriteValuegeneric_OpRsh8x8(v, config) + case OpSliceCap: + return rewriteValuegeneric_OpSliceCap(v, config) + case OpSliceLen: + return rewriteValuegeneric_OpSliceLen(v, config) + case OpSlicePtr: + return rewriteValuegeneric_OpSlicePtr(v, config) + 
case OpStore: + return rewriteValuegeneric_OpStore(v, config) + case OpStringLen: + return rewriteValuegeneric_OpStringLen(v, config) + case OpStringPtr: + return rewriteValuegeneric_OpStringPtr(v, config) + case OpStructSelect: + return rewriteValuegeneric_OpStructSelect(v, config) + case OpSub16: + return rewriteValuegeneric_OpSub16(v, config) + case OpSub32: + return rewriteValuegeneric_OpSub32(v, config) + case OpSub64: + return rewriteValuegeneric_OpSub64(v, config) + case OpSub8: + return rewriteValuegeneric_OpSub8(v, config) + case OpTrunc16to8: + return rewriteValuegeneric_OpTrunc16to8(v, config) + case OpTrunc32to16: + return rewriteValuegeneric_OpTrunc32to16(v, config) + case OpTrunc32to8: + return rewriteValuegeneric_OpTrunc32to8(v, config) + case OpTrunc64to16: + return rewriteValuegeneric_OpTrunc64to16(v, config) + case OpTrunc64to32: + return rewriteValuegeneric_OpTrunc64to32(v, config) + case OpTrunc64to8: + return rewriteValuegeneric_OpTrunc64to8(v, config) + case OpXor16: + return rewriteValuegeneric_OpXor16(v, config) + case OpXor32: + return rewriteValuegeneric_OpXor32(v, config) + case OpXor64: + return rewriteValuegeneric_OpXor64(v, config) + case OpXor8: + return rewriteValuegeneric_OpXor8(v, config) + } + return false +} +func rewriteValuegeneric_OpAdd16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Add16 (Const16 [c]) (Const16 [d])) + // cond: + // result: (Const16 [c+d]) + for { + if v.Args[0].Op != OpConst16 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst16 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst16) + v.AuxInt = c + d + return true + } + // match: (Add16 x (Const16 <t> [c])) + // cond: x.Op != OpConst16 + // result: (Add16 (Const16 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst16 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst16) { + break + } + v.reset(OpAdd16) + v0 := b.NewValue0(v.Line, OpConst16, t) + v0.AuxInt = c + 
v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Add16 (Const16 [0]) x) + // cond: + // result: x + for { + if v.Args[0].Op != OpConst16 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + x := v.Args[1] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + return false +} +func rewriteValuegeneric_OpAdd32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Add32 (Const32 [c]) (Const32 [d])) + // cond: + // result: (Const32 [c+d]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst32 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst32) + v.AuxInt = c + d + return true + } + // match: (Add32 x (Const32 <t> [c])) + // cond: x.Op != OpConst32 + // result: (Add32 (Const32 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst32 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst32) { + break + } + v.reset(OpAdd32) + v0 := b.NewValue0(v.Line, OpConst32, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Add32 (Const32 [0]) x) + // cond: + // result: x + for { + if v.Args[0].Op != OpConst32 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + x := v.Args[1] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + return false +} +func rewriteValuegeneric_OpAdd64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Add64 (Const64 [c]) (Const64 [d])) + // cond: + // result: (Const64 [c+d]) + for { + if v.Args[0].Op != OpConst64 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst64) + v.AuxInt = c + d + return true + } + // match: (Add64 x (Const64 <t> [c])) + // cond: x.Op != OpConst64 + // result: (Add64 (Const64 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst64) { + break + } + 
v.reset(OpAdd64) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Add64 (Const64 [0]) x) + // cond: + // result: x + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + x := v.Args[1] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + return false +} +func rewriteValuegeneric_OpAdd8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Add8 (Const8 [c]) (Const8 [d])) + // cond: + // result: (Const8 [c+d]) + for { + if v.Args[0].Op != OpConst8 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst8 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst8) + v.AuxInt = c + d + return true + } + // match: (Add8 x (Const8 <t> [c])) + // cond: x.Op != OpConst8 + // result: (Add8 (Const8 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst8 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst8) { + break + } + v.reset(OpAdd8) + v0 := b.NewValue0(v.Line, OpConst8, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Add8 (Const8 [0]) x) + // cond: + // result: x + for { + if v.Args[0].Op != OpConst8 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + x := v.Args[1] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + return false +} +func rewriteValuegeneric_OpAnd16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (And16 x (Const16 <t> [c])) + // cond: x.Op != OpConst16 + // result: (And16 (Const16 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst16 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst16) { + break + } + v.reset(OpAnd16) + v0 := b.NewValue0(v.Line, OpConst16, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (And16 x x) + // cond: + // result: x + for { + x := v.Args[0] + if v.Args[1] != x { + break + } + v.reset(OpCopy) 
+ v.Type = x.Type + v.AddArg(x) + return true + } + // match: (And16 (Const16 [-1]) x) + // cond: + // result: x + for { + if v.Args[0].Op != OpConst16 { + break + } + if v.Args[0].AuxInt != -1 { + break + } + x := v.Args[1] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (And16 (Const16 [0]) _) + // cond: + // result: (Const16 [0]) + for { + if v.Args[0].Op != OpConst16 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst16) + v.AuxInt = 0 + return true + } + return false +} +func rewriteValuegeneric_OpAnd32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (And32 x (Const32 <t> [c])) + // cond: x.Op != OpConst32 + // result: (And32 (Const32 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst32 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst32) { + break + } + v.reset(OpAnd32) + v0 := b.NewValue0(v.Line, OpConst32, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (And32 x x) + // cond: + // result: x + for { + x := v.Args[0] + if v.Args[1] != x { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (And32 (Const32 [-1]) x) + // cond: + // result: x + for { + if v.Args[0].Op != OpConst32 { + break + } + if v.Args[0].AuxInt != -1 { + break + } + x := v.Args[1] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (And32 (Const32 [0]) _) + // cond: + // result: (Const32 [0]) + for { + if v.Args[0].Op != OpConst32 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst32) + v.AuxInt = 0 + return true + } + // match: (And32 <t> (Const32 [y]) x) + // cond: nlz(int64(int32(y))) + nto(int64(int32(y))) == 64 + // result: (Rsh32Ux32 (Lsh32x32 <t> x (Const32 <t> [nlz(int64(int32(y)))-32])) (Const32 <t> [nlz(int64(int32(y)))-32])) + for { + t := v.Type + if v.Args[0].Op != OpConst32 { + break + } + y := v.Args[0].AuxInt + x := v.Args[1] + if 
!(nlz(int64(int32(y)))+nto(int64(int32(y))) == 64) { + break + } + v.reset(OpRsh32Ux32) + v0 := b.NewValue0(v.Line, OpLsh32x32, t) + v0.AddArg(x) + v1 := b.NewValue0(v.Line, OpConst32, t) + v1.AuxInt = nlz(int64(int32(y))) - 32 + v0.AddArg(v1) + v.AddArg(v0) + v2 := b.NewValue0(v.Line, OpConst32, t) + v2.AuxInt = nlz(int64(int32(y))) - 32 + v.AddArg(v2) + return true + } + // match: (And32 <t> (Const32 [y]) x) + // cond: nlo(int64(int32(y))) + ntz(int64(int32(y))) == 64 + // result: (Lsh32x32 (Rsh32Ux32 <t> x (Const32 <t> [ntz(int64(int32(y)))])) (Const32 <t> [ntz(int64(int32(y)))])) + for { + t := v.Type + if v.Args[0].Op != OpConst32 { + break + } + y := v.Args[0].AuxInt + x := v.Args[1] + if !(nlo(int64(int32(y)))+ntz(int64(int32(y))) == 64) { + break + } + v.reset(OpLsh32x32) + v0 := b.NewValue0(v.Line, OpRsh32Ux32, t) + v0.AddArg(x) + v1 := b.NewValue0(v.Line, OpConst32, t) + v1.AuxInt = ntz(int64(int32(y))) + v0.AddArg(v1) + v.AddArg(v0) + v2 := b.NewValue0(v.Line, OpConst32, t) + v2.AuxInt = ntz(int64(int32(y))) + v.AddArg(v2) + return true + } + return false +} +func rewriteValuegeneric_OpAnd64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (And64 x (Const64 <t> [c])) + // cond: x.Op != OpConst64 + // result: (And64 (Const64 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst64) { + break + } + v.reset(OpAnd64) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (And64 x x) + // cond: + // result: x + for { + x := v.Args[0] + if v.Args[1] != x { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (And64 (Const64 [-1]) x) + // cond: + // result: x + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != -1 { + break + } + x := v.Args[1] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: 
(And64 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } + // match: (And64 <t> (Const64 [y]) x) + // cond: nlz(y) + nto(y) == 64 + // result: (Rsh64Ux64 (Lsh64x64 <t> x (Const64 <t> [nlz(y)])) (Const64 <t> [nlz(y)])) + for { + t := v.Type + if v.Args[0].Op != OpConst64 { + break + } + y := v.Args[0].AuxInt + x := v.Args[1] + if !(nlz(y)+nto(y) == 64) { + break + } + v.reset(OpRsh64Ux64) + v0 := b.NewValue0(v.Line, OpLsh64x64, t) + v0.AddArg(x) + v1 := b.NewValue0(v.Line, OpConst64, t) + v1.AuxInt = nlz(y) + v0.AddArg(v1) + v.AddArg(v0) + v2 := b.NewValue0(v.Line, OpConst64, t) + v2.AuxInt = nlz(y) + v.AddArg(v2) + return true + } + // match: (And64 <t> (Const64 [y]) x) + // cond: nlo(y) + ntz(y) == 64 + // result: (Lsh64x64 (Rsh64Ux64 <t> x (Const64 <t> [ntz(y)])) (Const64 <t> [ntz(y)])) + for { + t := v.Type + if v.Args[0].Op != OpConst64 { + break + } + y := v.Args[0].AuxInt + x := v.Args[1] + if !(nlo(y)+ntz(y) == 64) { + break + } + v.reset(OpLsh64x64) + v0 := b.NewValue0(v.Line, OpRsh64Ux64, t) + v0.AddArg(x) + v1 := b.NewValue0(v.Line, OpConst64, t) + v1.AuxInt = ntz(y) + v0.AddArg(v1) + v.AddArg(v0) + v2 := b.NewValue0(v.Line, OpConst64, t) + v2.AuxInt = ntz(y) + v.AddArg(v2) + return true + } + return false +} +func rewriteValuegeneric_OpAnd8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (And8 x (Const8 <t> [c])) + // cond: x.Op != OpConst8 + // result: (And8 (Const8 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst8 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst8) { + break + } + v.reset(OpAnd8) + v0 := b.NewValue0(v.Line, OpConst8, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (And8 x x) + // cond: + // result: x + for { + x := v.Args[0] + if v.Args[1] != x { + break + } + v.reset(OpCopy) + v.Type = 
x.Type + v.AddArg(x) + return true + } + // match: (And8 (Const8 [-1]) x) + // cond: + // result: x + for { + if v.Args[0].Op != OpConst8 { + break + } + if v.Args[0].AuxInt != -1 { + break + } + x := v.Args[1] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (And8 (Const8 [0]) _) + // cond: + // result: (Const8 [0]) + for { + if v.Args[0].Op != OpConst8 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst8) + v.AuxInt = 0 + return true + } + return false +} +func rewriteValuegeneric_OpArg(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Arg {n} [off]) + // cond: v.Type.IsString() + // result: (StringMake (Arg <config.fe.TypeBytePtr()> {n} [off]) (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize])) + for { + n := v.Aux + off := v.AuxInt + if !(v.Type.IsString()) { + break + } + v.reset(OpStringMake) + v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr()) + v0.Aux = n + v0.AuxInt = off + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt()) + v1.Aux = n + v1.AuxInt = off + config.PtrSize + v.AddArg(v1) + return true + } + // match: (Arg {n} [off]) + // cond: v.Type.IsSlice() + // result: (SliceMake (Arg <config.fe.TypeBytePtr()> {n} [off]) (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize]) (Arg <config.fe.TypeInt()> {n} [off+2*config.PtrSize])) + for { + n := v.Aux + off := v.AuxInt + if !(v.Type.IsSlice()) { + break + } + v.reset(OpSliceMake) + v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr()) + v0.Aux = n + v0.AuxInt = off + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt()) + v1.Aux = n + v1.AuxInt = off + config.PtrSize + v.AddArg(v1) + v2 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt()) + v2.Aux = n + v2.AuxInt = off + 2*config.PtrSize + v.AddArg(v2) + return true + } + // match: (Arg {n} [off]) + // cond: v.Type.IsInterface() + // result: (IMake (Arg <config.fe.TypeBytePtr()> {n} [off]) (Arg <config.fe.TypeBytePtr()> {n} 
[off+config.PtrSize])) + for { + n := v.Aux + off := v.AuxInt + if !(v.Type.IsInterface()) { + break + } + v.reset(OpIMake) + v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr()) + v0.Aux = n + v0.AuxInt = off + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr()) + v1.Aux = n + v1.AuxInt = off + config.PtrSize + v.AddArg(v1) + return true + } + // match: (Arg {n} [off]) + // cond: v.Type.IsComplex() && v.Type.Size() == 16 + // result: (ComplexMake (Arg <config.fe.TypeFloat64()> {n} [off]) (Arg <config.fe.TypeFloat64()> {n} [off+8])) + for { + n := v.Aux + off := v.AuxInt + if !(v.Type.IsComplex() && v.Type.Size() == 16) { + break + } + v.reset(OpComplexMake) + v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat64()) + v0.Aux = n + v0.AuxInt = off + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat64()) + v1.Aux = n + v1.AuxInt = off + 8 + v.AddArg(v1) + return true + } + // match: (Arg {n} [off]) + // cond: v.Type.IsComplex() && v.Type.Size() == 8 + // result: (ComplexMake (Arg <config.fe.TypeFloat32()> {n} [off]) (Arg <config.fe.TypeFloat32()> {n} [off+4])) + for { + n := v.Aux + off := v.AuxInt + if !(v.Type.IsComplex() && v.Type.Size() == 8) { + break + } + v.reset(OpComplexMake) + v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat32()) + v0.Aux = n + v0.AuxInt = off + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat32()) + v1.Aux = n + v1.AuxInt = off + 4 + v.AddArg(v1) + return true + } + // match: (Arg <t>) + // cond: t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t) + // result: (StructMake0) + for { + t := v.Type + if !(t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)) { + break + } + v.reset(OpStructMake0) + return true + } + // match: (Arg <t> {n} [off]) + // cond: t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t) + // result: (StructMake1 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])) + for { + t := v.Type + n := v.Aux + off := v.AuxInt + if !(t.IsStruct() && 
t.NumFields() == 1 && config.fe.CanSSA(t)) { + break + } + v.reset(OpStructMake1) + v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0)) + v0.Aux = n + v0.AuxInt = off + t.FieldOff(0) + v.AddArg(v0) + return true + } + // match: (Arg <t> {n} [off]) + // cond: t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t) + // result: (StructMake2 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)])) + for { + t := v.Type + n := v.Aux + off := v.AuxInt + if !(t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)) { + break + } + v.reset(OpStructMake2) + v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0)) + v0.Aux = n + v0.AuxInt = off + t.FieldOff(0) + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpArg, t.FieldType(1)) + v1.Aux = n + v1.AuxInt = off + t.FieldOff(1) + v.AddArg(v1) + return true + } + // match: (Arg <t> {n} [off]) + // cond: t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t) + // result: (StructMake3 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]) (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)])) + for { + t := v.Type + n := v.Aux + off := v.AuxInt + if !(t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)) { + break + } + v.reset(OpStructMake3) + v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0)) + v0.Aux = n + v0.AuxInt = off + t.FieldOff(0) + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpArg, t.FieldType(1)) + v1.Aux = n + v1.AuxInt = off + t.FieldOff(1) + v.AddArg(v1) + v2 := b.NewValue0(v.Line, OpArg, t.FieldType(2)) + v2.Aux = n + v2.AuxInt = off + t.FieldOff(2) + v.AddArg(v2) + return true + } + // match: (Arg <t> {n} [off]) + // cond: t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t) + // result: (StructMake4 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]) (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)]) (Arg <t.FieldType(3)> {n} [off+t.FieldOff(3)])) + for { + t := v.Type + n := v.Aux + off := v.AuxInt + if 
!(t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)) { + break + } + v.reset(OpStructMake4) + v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0)) + v0.Aux = n + v0.AuxInt = off + t.FieldOff(0) + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpArg, t.FieldType(1)) + v1.Aux = n + v1.AuxInt = off + t.FieldOff(1) + v.AddArg(v1) + v2 := b.NewValue0(v.Line, OpArg, t.FieldType(2)) + v2.Aux = n + v2.AuxInt = off + t.FieldOff(2) + v.AddArg(v2) + v3 := b.NewValue0(v.Line, OpArg, t.FieldType(3)) + v3.Aux = n + v3.AuxInt = off + t.FieldOff(3) + v.AddArg(v3) + return true + } + return false +} +func rewriteValuegeneric_OpArrayIndex(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (ArrayIndex (Load ptr mem) idx) + // cond: b == v.Args[0].Block + // result: (Load (PtrIndex <v.Type.PtrTo()> ptr idx) mem) + for { + if v.Args[0].Op != OpLoad { + break + } + ptr := v.Args[0].Args[0] + mem := v.Args[0].Args[1] + idx := v.Args[1] + if !(b == v.Args[0].Block) { + break + } + v.reset(OpLoad) + v0 := b.NewValue0(v.Line, OpPtrIndex, v.Type.PtrTo()) + v0.AddArg(ptr) + v0.AddArg(idx) + v.AddArg(v0) + v.AddArg(mem) + return true + } + return false +} +func rewriteValuegeneric_OpCom16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Com16 (Com16 x)) + // cond: + // result: x + for { + if v.Args[0].Op != OpCom16 { + break + } + x := v.Args[0].Args[0] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + return false +} +func rewriteValuegeneric_OpCom32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Com32 (Com32 x)) + // cond: + // result: x + for { + if v.Args[0].Op != OpCom32 { + break + } + x := v.Args[0].Args[0] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + return false +} +func rewriteValuegeneric_OpCom64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Com64 (Com64 x)) + // cond: + // result: x + for { + if v.Args[0].Op != OpCom64 { + break + } + x := v.Args[0].Args[0] + 
v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + return false +} +func rewriteValuegeneric_OpCom8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Com8 (Com8 x)) + // cond: + // result: x + for { + if v.Args[0].Op != OpCom8 { + break + } + x := v.Args[0].Args[0] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + return false +} +func rewriteValuegeneric_OpComplexImag(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (ComplexImag (ComplexMake _ imag )) + // cond: + // result: imag + for { + if v.Args[0].Op != OpComplexMake { + break + } + imag := v.Args[0].Args[1] + v.reset(OpCopy) + v.Type = imag.Type + v.AddArg(imag) + return true + } + return false +} +func rewriteValuegeneric_OpComplexReal(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (ComplexReal (ComplexMake real _ )) + // cond: + // result: real + for { + if v.Args[0].Op != OpComplexMake { + break + } + real := v.Args[0].Args[0] + v.reset(OpCopy) + v.Type = real.Type + v.AddArg(real) + return true + } + return false +} +func rewriteValuegeneric_OpConstInterface(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (ConstInterface) + // cond: + // result: (IMake (ConstNil <config.fe.TypeBytePtr()>) (ConstNil <config.fe.TypeBytePtr()>)) + for { + v.reset(OpIMake) + v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr()) + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr()) + v.AddArg(v1) + return true + } + return false +} +func rewriteValuegeneric_OpConstSlice(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (ConstSlice) + // cond: config.PtrSize == 4 + // result: (SliceMake (ConstNil <config.fe.TypeBytePtr()>) (Const32 <config.fe.TypeInt()> [0]) (Const32 <config.fe.TypeInt()> [0])) + for { + if !(config.PtrSize == 4) { + break + } + v.reset(OpSliceMake) + v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr()) + v.AddArg(v0) + v1 := 
b.NewValue0(v.Line, OpConst32, config.fe.TypeInt()) + v1.AuxInt = 0 + v.AddArg(v1) + v2 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt()) + v2.AuxInt = 0 + v.AddArg(v2) + return true + } + // match: (ConstSlice) + // cond: config.PtrSize == 8 + // result: (SliceMake (ConstNil <config.fe.TypeBytePtr()>) (Const64 <config.fe.TypeInt()> [0]) (Const64 <config.fe.TypeInt()> [0])) + for { + if !(config.PtrSize == 8) { + break + } + v.reset(OpSliceMake) + v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr()) + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt()) + v1.AuxInt = 0 + v.AddArg(v1) + v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt()) + v2.AuxInt = 0 + v.AddArg(v2) + return true + } + return false +} +func rewriteValuegeneric_OpConstString(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (ConstString {s}) + // cond: config.PtrSize == 4 && s.(string) == "" + // result: (StringMake (ConstNil) (Const32 <config.fe.TypeInt()> [0])) + for { + s := v.Aux + if !(config.PtrSize == 4 && s.(string) == "") { + break + } + v.reset(OpStringMake) + v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr()) + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt()) + v1.AuxInt = 0 + v.AddArg(v1) + return true + } + // match: (ConstString {s}) + // cond: config.PtrSize == 8 && s.(string) == "" + // result: (StringMake (ConstNil) (Const64 <config.fe.TypeInt()> [0])) + for { + s := v.Aux + if !(config.PtrSize == 8 && s.(string) == "") { + break + } + v.reset(OpStringMake) + v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr()) + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt()) + v1.AuxInt = 0 + v.AddArg(v1) + return true + } + // match: (ConstString {s}) + // cond: config.PtrSize == 4 && s.(string) != "" + // result: (StringMake (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))} (SB)) (Const32 <config.fe.TypeInt()> [int64(len(s.(string)))])) + for { + s 
:= v.Aux + if !(config.PtrSize == 4 && s.(string) != "") { + break + } + v.reset(OpStringMake) + v0 := b.NewValue0(v.Line, OpAddr, config.fe.TypeBytePtr()) + v0.Aux = config.fe.StringData(s.(string)) + v1 := b.NewValue0(v.Line, OpSB, config.fe.TypeUintptr()) + v0.AddArg(v1) + v.AddArg(v0) + v2 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt()) + v2.AuxInt = int64(len(s.(string))) + v.AddArg(v2) + return true + } + // match: (ConstString {s}) + // cond: config.PtrSize == 8 && s.(string) != "" + // result: (StringMake (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))} (SB)) (Const64 <config.fe.TypeInt()> [int64(len(s.(string)))])) + for { + s := v.Aux + if !(config.PtrSize == 8 && s.(string) != "") { + break + } + v.reset(OpStringMake) + v0 := b.NewValue0(v.Line, OpAddr, config.fe.TypeBytePtr()) + v0.Aux = config.fe.StringData(s.(string)) + v1 := b.NewValue0(v.Line, OpSB, config.fe.TypeUintptr()) + v0.AddArg(v1) + v.AddArg(v0) + v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt()) + v2.AuxInt = int64(len(s.(string))) + v.AddArg(v2) + return true + } + return false +} +func rewriteValuegeneric_OpConvert(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Convert (Add64 (Convert ptr mem) off) mem) + // cond: + // result: (Add64 ptr off) + for { + if v.Args[0].Op != OpAdd64 { + break + } + if v.Args[0].Args[0].Op != OpConvert { + break + } + ptr := v.Args[0].Args[0].Args[0] + mem := v.Args[0].Args[0].Args[1] + off := v.Args[0].Args[1] + if v.Args[1] != mem { + break + } + v.reset(OpAdd64) + v.AddArg(ptr) + v.AddArg(off) + return true + } + // match: (Convert (Add64 off (Convert ptr mem)) mem) + // cond: + // result: (Add64 ptr off) + for { + if v.Args[0].Op != OpAdd64 { + break + } + off := v.Args[0].Args[0] + if v.Args[0].Args[1].Op != OpConvert { + break + } + ptr := v.Args[0].Args[1].Args[0] + mem := v.Args[0].Args[1].Args[1] + if v.Args[1] != mem { + break + } + v.reset(OpAdd64) + v.AddArg(ptr) + v.AddArg(off) + return true 
+ } + // match: (Convert (Convert ptr mem) mem) + // cond: + // result: ptr + for { + if v.Args[0].Op != OpConvert { + break + } + ptr := v.Args[0].Args[0] + mem := v.Args[0].Args[1] + if v.Args[1] != mem { + break + } + v.reset(OpCopy) + v.Type = ptr.Type + v.AddArg(ptr) + return true + } + return false +} +func rewriteValuegeneric_OpDiv64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Div64 <t> x (Const64 [c])) + // cond: c > 0 && smagic64ok(c) && smagic64m(c) > 0 + // result: (Sub64 <t> (Rsh64x64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63]))) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + c := v.Args[1].AuxInt + if !(c > 0 && smagic64ok(c) && smagic64m(c) > 0) { + break + } + v.reset(OpSub64) + v.Type = t + v0 := b.NewValue0(v.Line, OpRsh64x64, t) + v1 := b.NewValue0(v.Line, OpHmul64, t) + v2 := b.NewValue0(v.Line, OpConst64, t) + v2.AuxInt = smagic64m(c) + v1.AddArg(v2) + v1.AddArg(x) + v0.AddArg(v1) + v3 := b.NewValue0(v.Line, OpConst64, t) + v3.AuxInt = smagic64s(c) + v0.AddArg(v3) + v.AddArg(v0) + v4 := b.NewValue0(v.Line, OpRsh64x64, t) + v4.AddArg(x) + v5 := b.NewValue0(v.Line, OpConst64, t) + v5.AuxInt = 63 + v4.AddArg(v5) + v.AddArg(v4) + return true + } + // match: (Div64 <t> x (Const64 [c])) + // cond: c > 0 && smagic64ok(c) && smagic64m(c) < 0 + // result: (Sub64 <t> (Rsh64x64 <t> (Add64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63]))) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + c := v.Args[1].AuxInt + if !(c > 0 && smagic64ok(c) && smagic64m(c) < 0) { + break + } + v.reset(OpSub64) + v.Type = t + v0 := b.NewValue0(v.Line, OpRsh64x64, t) + v1 := b.NewValue0(v.Line, OpAdd64, t) + v2 := b.NewValue0(v.Line, OpHmul64, t) + v3 := b.NewValue0(v.Line, OpConst64, t) + v3.AuxInt = smagic64m(c) + v2.AddArg(v3) + v2.AddArg(x) 
+ v1.AddArg(v2) + v1.AddArg(x) + v0.AddArg(v1) + v4 := b.NewValue0(v.Line, OpConst64, t) + v4.AuxInt = smagic64s(c) + v0.AddArg(v4) + v.AddArg(v0) + v5 := b.NewValue0(v.Line, OpRsh64x64, t) + v5.AddArg(x) + v6 := b.NewValue0(v.Line, OpConst64, t) + v6.AuxInt = 63 + v5.AddArg(v6) + v.AddArg(v5) + return true + } + // match: (Div64 <t> x (Const64 [c])) + // cond: c < 0 && smagic64ok(c) && smagic64m(c) > 0 + // result: (Neg64 <t> (Sub64 <t> (Rsh64x64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63])))) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + c := v.Args[1].AuxInt + if !(c < 0 && smagic64ok(c) && smagic64m(c) > 0) { + break + } + v.reset(OpNeg64) + v.Type = t + v0 := b.NewValue0(v.Line, OpSub64, t) + v1 := b.NewValue0(v.Line, OpRsh64x64, t) + v2 := b.NewValue0(v.Line, OpHmul64, t) + v3 := b.NewValue0(v.Line, OpConst64, t) + v3.AuxInt = smagic64m(c) + v2.AddArg(v3) + v2.AddArg(x) + v1.AddArg(v2) + v4 := b.NewValue0(v.Line, OpConst64, t) + v4.AuxInt = smagic64s(c) + v1.AddArg(v4) + v0.AddArg(v1) + v5 := b.NewValue0(v.Line, OpRsh64x64, t) + v5.AddArg(x) + v6 := b.NewValue0(v.Line, OpConst64, t) + v6.AuxInt = 63 + v5.AddArg(v6) + v0.AddArg(v5) + v.AddArg(v0) + return true + } + // match: (Div64 <t> x (Const64 [c])) + // cond: c < 0 && smagic64ok(c) && smagic64m(c) < 0 + // result: (Neg64 <t> (Sub64 <t> (Rsh64x64 <t> (Add64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63])))) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + c := v.Args[1].AuxInt + if !(c < 0 && smagic64ok(c) && smagic64m(c) < 0) { + break + } + v.reset(OpNeg64) + v.Type = t + v0 := b.NewValue0(v.Line, OpSub64, t) + v1 := b.NewValue0(v.Line, OpRsh64x64, t) + v2 := b.NewValue0(v.Line, OpAdd64, t) + v3 := b.NewValue0(v.Line, OpHmul64, t) + v4 := b.NewValue0(v.Line, OpConst64, t) + v4.AuxInt = 
smagic64m(c) + v3.AddArg(v4) + v3.AddArg(x) + v2.AddArg(v3) + v2.AddArg(x) + v1.AddArg(v2) + v5 := b.NewValue0(v.Line, OpConst64, t) + v5.AuxInt = smagic64s(c) + v1.AddArg(v5) + v0.AddArg(v1) + v6 := b.NewValue0(v.Line, OpRsh64x64, t) + v6.AddArg(x) + v7 := b.NewValue0(v.Line, OpConst64, t) + v7.AuxInt = 63 + v6.AddArg(v7) + v0.AddArg(v6) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpDiv64u(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Div64u <t> x (Const64 [c])) + // cond: umagic64ok(c) && !umagic64a(c) + // result: (Rsh64Ux64 (Hmul64u <t> (Const64 <t> [umagic64m(c)]) x) (Const64 <t> [umagic64s(c)])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + c := v.Args[1].AuxInt + if !(umagic64ok(c) && !umagic64a(c)) { + break + } + v.reset(OpRsh64Ux64) + v0 := b.NewValue0(v.Line, OpHmul64u, t) + v1 := b.NewValue0(v.Line, OpConst64, t) + v1.AuxInt = umagic64m(c) + v0.AddArg(v1) + v0.AddArg(x) + v.AddArg(v0) + v2 := b.NewValue0(v.Line, OpConst64, t) + v2.AuxInt = umagic64s(c) + v.AddArg(v2) + return true + } + // match: (Div64u <t> x (Const64 [c])) + // cond: umagic64ok(c) && umagic64a(c) + // result: (Rsh64Ux64 (Avg64u <t> (Hmul64u <t> x (Const64 <t> [umagic64m(c)])) x) (Const64 <t> [umagic64s(c)-1])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + c := v.Args[1].AuxInt + if !(umagic64ok(c) && umagic64a(c)) { + break + } + v.reset(OpRsh64Ux64) + v0 := b.NewValue0(v.Line, OpAvg64u, t) + v1 := b.NewValue0(v.Line, OpHmul64u, t) + v1.AddArg(x) + v2 := b.NewValue0(v.Line, OpConst64, t) + v2.AuxInt = umagic64m(c) + v1.AddArg(v2) + v0.AddArg(v1) + v0.AddArg(x) + v.AddArg(v0) + v3 := b.NewValue0(v.Line, OpConst64, t) + v3.AuxInt = umagic64s(c) - 1 + v.AddArg(v3) + return true + } + return false +} +func rewriteValuegeneric_OpEq16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Eq16 x x) + // cond: + // result: (ConstBool [1]) + for 
{ + x := v.Args[0] + if v.Args[1] != x { + break + } + v.reset(OpConstBool) + v.AuxInt = 1 + return true + } + // match: (Eq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x)) + // cond: + // result: (Eq16 (Const16 <t> [c-d]) x) + for { + if v.Args[0].Op != OpConst16 { + break + } + t := v.Args[0].Type + c := v.Args[0].AuxInt + if v.Args[1].Op != OpAdd16 { + break + } + if v.Args[1].Args[0].Op != OpConst16 { + break + } + if v.Args[1].Args[0].Type != v.Args[0].Type { + break + } + d := v.Args[1].Args[0].AuxInt + x := v.Args[1].Args[1] + v.reset(OpEq16) + v0 := b.NewValue0(v.Line, OpConst16, t) + v0.AuxInt = c - d + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Eq16 x (Const16 <t> [c])) + // cond: x.Op != OpConst16 + // result: (Eq16 (Const16 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst16 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst16) { + break + } + v.reset(OpEq16) + v0 := b.NewValue0(v.Line, OpConst16, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Eq16 (Const16 [c]) (Const16 [d])) + // cond: + // result: (ConstBool [b2i(int16(c) == int16(d))]) + for { + if v.Args[0].Op != OpConst16 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst16 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int16(c) == int16(d)) + return true + } + return false +} +func rewriteValuegeneric_OpEq32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Eq32 x x) + // cond: + // result: (ConstBool [1]) + for { + x := v.Args[0] + if v.Args[1] != x { + break + } + v.reset(OpConstBool) + v.AuxInt = 1 + return true + } + // match: (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x)) + // cond: + // result: (Eq32 (Const32 <t> [c-d]) x) + for { + if v.Args[0].Op != OpConst32 { + break + } + t := v.Args[0].Type + c := v.Args[0].AuxInt + if v.Args[1].Op != OpAdd32 { + break + } + if v.Args[1].Args[0].Op != OpConst32 { + break + } + if 
v.Args[1].Args[0].Type != v.Args[0].Type { + break + } + d := v.Args[1].Args[0].AuxInt + x := v.Args[1].Args[1] + v.reset(OpEq32) + v0 := b.NewValue0(v.Line, OpConst32, t) + v0.AuxInt = c - d + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Eq32 x (Const32 <t> [c])) + // cond: x.Op != OpConst32 + // result: (Eq32 (Const32 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst32 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst32) { + break + } + v.reset(OpEq32) + v0 := b.NewValue0(v.Line, OpConst32, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Eq32 (Const32 [c]) (Const32 [d])) + // cond: + // result: (ConstBool [b2i(int32(c) == int32(d))]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst32 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int32(c) == int32(d)) + return true + } + return false +} +func rewriteValuegeneric_OpEq64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Eq64 x x) + // cond: + // result: (ConstBool [1]) + for { + x := v.Args[0] + if v.Args[1] != x { + break + } + v.reset(OpConstBool) + v.AuxInt = 1 + return true + } + // match: (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x)) + // cond: + // result: (Eq64 (Const64 <t> [c-d]) x) + for { + if v.Args[0].Op != OpConst64 { + break + } + t := v.Args[0].Type + c := v.Args[0].AuxInt + if v.Args[1].Op != OpAdd64 { + break + } + if v.Args[1].Args[0].Op != OpConst64 { + break + } + if v.Args[1].Args[0].Type != v.Args[0].Type { + break + } + d := v.Args[1].Args[0].AuxInt + x := v.Args[1].Args[1] + v.reset(OpEq64) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = c - d + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Eq64 x (Const64 <t> [c])) + // cond: x.Op != OpConst64 + // result: (Eq64 (Const64 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + t := 
v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst64) { + break + } + v.reset(OpEq64) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Eq64 (Const64 [c]) (Const64 [d])) + // cond: + // result: (ConstBool [b2i(int64(c) == int64(d))]) + for { + if v.Args[0].Op != OpConst64 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int64(c) == int64(d)) + return true + } + return false +} +func rewriteValuegeneric_OpEq8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Eq8 x x) + // cond: + // result: (ConstBool [1]) + for { + x := v.Args[0] + if v.Args[1] != x { + break + } + v.reset(OpConstBool) + v.AuxInt = 1 + return true + } + // match: (Eq8 (ConstBool [c]) (ConstBool [d])) + // cond: + // result: (ConstBool [b2i((int8(c) != 0) == (int8(d) != 0))]) + for { + if v.Args[0].Op != OpConstBool { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConstBool { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i((int8(c) != 0) == (int8(d) != 0)) + return true + } + // match: (Eq8 (ConstBool [0]) x) + // cond: + // result: (Not x) + for { + if v.Args[0].Op != OpConstBool { + break + } + if v.Args[0].AuxInt != 0 { + break + } + x := v.Args[1] + v.reset(OpNot) + v.AddArg(x) + return true + } + // match: (Eq8 (ConstBool [1]) x) + // cond: + // result: x + for { + if v.Args[0].Op != OpConstBool { + break + } + if v.Args[0].AuxInt != 1 { + break + } + x := v.Args[1] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Eq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x)) + // cond: + // result: (Eq8 (Const8 <t> [c-d]) x) + for { + if v.Args[0].Op != OpConst8 { + break + } + t := v.Args[0].Type + c := v.Args[0].AuxInt + if v.Args[1].Op != OpAdd8 { + break + } + if v.Args[1].Args[0].Op != OpConst8 { + break + } + if v.Args[1].Args[0].Type != 
v.Args[0].Type { + break + } + d := v.Args[1].Args[0].AuxInt + x := v.Args[1].Args[1] + v.reset(OpEq8) + v0 := b.NewValue0(v.Line, OpConst8, t) + v0.AuxInt = c - d + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Eq8 x (Const8 <t> [c])) + // cond: x.Op != OpConst8 + // result: (Eq8 (Const8 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst8 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst8) { + break + } + v.reset(OpEq8) + v0 := b.NewValue0(v.Line, OpConst8, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Eq8 x (ConstBool <t> [c])) + // cond: x.Op != OpConstBool + // result: (Eq8 (ConstBool <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConstBool { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConstBool) { + break + } + v.reset(OpEq8) + v0 := b.NewValue0(v.Line, OpConstBool, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Eq8 (Const8 [c]) (Const8 [d])) + // cond: + // result: (ConstBool [b2i(int8(c) == int8(d))]) + for { + if v.Args[0].Op != OpConst8 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst8 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int8(c) == int8(d)) + return true + } + return false +} +func rewriteValuegeneric_OpEqInter(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (EqInter x y) + // cond: + // result: (EqPtr (ITab x) (ITab y)) + for { + x := v.Args[0] + y := v.Args[1] + v.reset(OpEqPtr) + v0 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr()) + v0.AddArg(x) + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr()) + v1.AddArg(y) + v.AddArg(v1) + return true + } + return false +} +func rewriteValuegeneric_OpEqPtr(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (EqPtr p (ConstNil)) + // cond: + // result: (Not (IsNonNil p)) + for { + p := v.Args[0] + if v.Args[1].Op != 
OpConstNil { + break + } + v.reset(OpNot) + v0 := b.NewValue0(v.Line, OpIsNonNil, config.fe.TypeBool()) + v0.AddArg(p) + v.AddArg(v0) + return true + } + // match: (EqPtr (ConstNil) p) + // cond: + // result: (Not (IsNonNil p)) + for { + if v.Args[0].Op != OpConstNil { + break + } + p := v.Args[1] + v.reset(OpNot) + v0 := b.NewValue0(v.Line, OpIsNonNil, config.fe.TypeBool()) + v0.AddArg(p) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpEqSlice(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (EqSlice x y) + // cond: + // result: (EqPtr (SlicePtr x) (SlicePtr y)) + for { + x := v.Args[0] + y := v.Args[1] + v.reset(OpEqPtr) + v0 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr()) + v0.AddArg(x) + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr()) + v1.AddArg(y) + v.AddArg(v1) + return true + } + return false +} +func rewriteValuegeneric_OpGeq16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Geq16 (Const16 [c]) (Const16 [d])) + // cond: + // result: (ConstBool [b2i(int16(c) >= int16(d))]) + for { + if v.Args[0].Op != OpConst16 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst16 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int16(c) >= int16(d)) + return true + } + return false +} +func rewriteValuegeneric_OpGeq16U(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Geq16U (Const16 [c]) (Const16 [d])) + // cond: + // result: (ConstBool [b2i(uint16(c) >= uint16(d))]) + for { + if v.Args[0].Op != OpConst16 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst16 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(uint16(c) >= uint16(d)) + return true + } + return false +} +func rewriteValuegeneric_OpGeq32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Geq32 (Const32 [c]) (Const32 [d])) + // cond: + // result: (ConstBool [b2i(int32(c) >= 
int32(d))]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst32 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int32(c) >= int32(d)) + return true + } + return false +} +func rewriteValuegeneric_OpGeq32U(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Geq32U (Const32 [c]) (Const32 [d])) + // cond: + // result: (ConstBool [b2i(uint32(c) >= uint32(d))]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst32 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(uint32(c) >= uint32(d)) + return true + } + return false +} +func rewriteValuegeneric_OpGeq64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Geq64 (Const64 [c]) (Const64 [d])) + // cond: + // result: (ConstBool [b2i(int64(c) >= int64(d))]) + for { + if v.Args[0].Op != OpConst64 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int64(c) >= int64(d)) + return true + } + return false +} +func rewriteValuegeneric_OpGeq64U(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Geq64U (Const64 [c]) (Const64 [d])) + // cond: + // result: (ConstBool [b2i(uint64(c) >= uint64(d))]) + for { + if v.Args[0].Op != OpConst64 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(uint64(c) >= uint64(d)) + return true + } + return false +} +func rewriteValuegeneric_OpGeq8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Geq8 (Const8 [c]) (Const8 [d])) + // cond: + // result: (ConstBool [b2i(int8(c) >= int8(d))]) + for { + if v.Args[0].Op != OpConst8 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst8 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int8(c) >= 
int8(d)) + return true + } + return false +} +func rewriteValuegeneric_OpGeq8U(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Geq8U (Const8 [c]) (Const8 [d])) + // cond: + // result: (ConstBool [b2i(uint8(c) >= uint8(d))]) + for { + if v.Args[0].Op != OpConst8 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst8 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(uint8(c) >= uint8(d)) + return true + } + return false +} +func rewriteValuegeneric_OpGreater16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Greater16 (Const16 [c]) (Const16 [d])) + // cond: + // result: (ConstBool [b2i(int16(c) > int16(d))]) + for { + if v.Args[0].Op != OpConst16 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst16 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int16(c) > int16(d)) + return true + } + return false +} +func rewriteValuegeneric_OpGreater16U(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Greater16U (Const16 [c]) (Const16 [d])) + // cond: + // result: (ConstBool [b2i(uint16(c) > uint16(d))]) + for { + if v.Args[0].Op != OpConst16 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst16 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(uint16(c) > uint16(d)) + return true + } + return false +} +func rewriteValuegeneric_OpGreater32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Greater32 (Const32 [c]) (Const32 [d])) + // cond: + // result: (ConstBool [b2i(int32(c) > int32(d))]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst32 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int32(c) > int32(d)) + return true + } + return false +} +func rewriteValuegeneric_OpGreater32U(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Greater32U (Const32 [c]) (Const32 [d])) + 
// cond: + // result: (ConstBool [b2i(uint32(c) > uint32(d))]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst32 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(uint32(c) > uint32(d)) + return true + } + return false +} +func rewriteValuegeneric_OpGreater64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Greater64 (Const64 [c]) (Const64 [d])) + // cond: + // result: (ConstBool [b2i(int64(c) > int64(d))]) + for { + if v.Args[0].Op != OpConst64 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int64(c) > int64(d)) + return true + } + return false +} +func rewriteValuegeneric_OpGreater64U(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Greater64U (Const64 [c]) (Const64 [d])) + // cond: + // result: (ConstBool [b2i(uint64(c) > uint64(d))]) + for { + if v.Args[0].Op != OpConst64 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(uint64(c) > uint64(d)) + return true + } + return false +} +func rewriteValuegeneric_OpGreater8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Greater8 (Const8 [c]) (Const8 [d])) + // cond: + // result: (ConstBool [b2i(int8(c) > int8(d))]) + for { + if v.Args[0].Op != OpConst8 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst8 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int8(c) > int8(d)) + return true + } + return false +} +func rewriteValuegeneric_OpGreater8U(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Greater8U (Const8 [c]) (Const8 [d])) + // cond: + // result: (ConstBool [b2i(uint8(c) > uint8(d))]) + for { + if v.Args[0].Op != OpConst8 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst8 { + break + } + d := 
v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(uint8(c) > uint8(d)) + return true + } + return false +} +func rewriteValuegeneric_OpIData(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (IData (IMake _ data)) + // cond: + // result: data + for { + if v.Args[0].Op != OpIMake { + break + } + data := v.Args[0].Args[1] + v.reset(OpCopy) + v.Type = data.Type + v.AddArg(data) + return true + } + return false +} +func rewriteValuegeneric_OpITab(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (ITab (IMake itab _)) + // cond: + // result: itab + for { + if v.Args[0].Op != OpIMake { + break + } + itab := v.Args[0].Args[0] + v.reset(OpCopy) + v.Type = itab.Type + v.AddArg(itab) + return true + } + return false +} +func rewriteValuegeneric_OpIsInBounds(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (IsInBounds (Const32 [c]) (Const32 [d])) + // cond: + // result: (ConstBool [b2i(inBounds32(c,d))]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst32 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(inBounds32(c, d)) + return true + } + // match: (IsInBounds (Const64 [c]) (Const64 [d])) + // cond: + // result: (ConstBool [b2i(inBounds64(c,d))]) + for { + if v.Args[0].Op != OpConst64 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(inBounds64(c, d)) + return true + } + return false +} +func rewriteValuegeneric_OpIsSliceInBounds(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (IsSliceInBounds (Const32 [c]) (Const32 [d])) + // cond: + // result: (ConstBool [b2i(sliceInBounds32(c,d))]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst32 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(sliceInBounds32(c, d)) + return true + } + // 
match: (IsSliceInBounds (Const64 [c]) (Const64 [d])) + // cond: + // result: (ConstBool [b2i(sliceInBounds64(c,d))]) + for { + if v.Args[0].Op != OpConst64 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(sliceInBounds64(c, d)) + return true + } + return false +} +func rewriteValuegeneric_OpLeq16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Leq16 (Const16 [c]) (Const16 [d])) + // cond: + // result: (ConstBool [b2i(int16(c) <= int16(d))]) + for { + if v.Args[0].Op != OpConst16 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst16 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int16(c) <= int16(d)) + return true + } + return false +} +func rewriteValuegeneric_OpLeq16U(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Leq16U (Const16 [c]) (Const16 [d])) + // cond: + // result: (ConstBool [b2i(uint16(c) <= uint16(d))]) + for { + if v.Args[0].Op != OpConst16 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst16 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(uint16(c) <= uint16(d)) + return true + } + return false +} +func rewriteValuegeneric_OpLeq32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Leq32 (Const32 [c]) (Const32 [d])) + // cond: + // result: (ConstBool [b2i(int32(c) <= int32(d))]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst32 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int32(c) <= int32(d)) + return true + } + return false +} +func rewriteValuegeneric_OpLeq32U(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Leq32U (Const32 [c]) (Const32 [d])) + // cond: + // result: (ConstBool [b2i(uint32(c) <= uint32(d))]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + if 
v.Args[1].Op != OpConst32 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(uint32(c) <= uint32(d)) + return true + } + return false +} +func rewriteValuegeneric_OpLeq64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Leq64 (Const64 [c]) (Const64 [d])) + // cond: + // result: (ConstBool [b2i(int64(c) <= int64(d))]) + for { + if v.Args[0].Op != OpConst64 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int64(c) <= int64(d)) + return true + } + return false +} +func rewriteValuegeneric_OpLeq64U(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Leq64U (Const64 [c]) (Const64 [d])) + // cond: + // result: (ConstBool [b2i(uint64(c) <= uint64(d))]) + for { + if v.Args[0].Op != OpConst64 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(uint64(c) <= uint64(d)) + return true + } + return false +} +func rewriteValuegeneric_OpLeq8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Leq8 (Const8 [c]) (Const8 [d])) + // cond: + // result: (ConstBool [b2i(int8(c) <= int8(d))]) + for { + if v.Args[0].Op != OpConst8 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst8 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int8(c) <= int8(d)) + return true + } + return false +} +func rewriteValuegeneric_OpLeq8U(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Leq8U (Const8 [c]) (Const8 [d])) + // cond: + // result: (ConstBool [b2i(uint8(c) <= uint8(d))]) + for { + if v.Args[0].Op != OpConst8 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst8 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(uint8(c) <= uint8(d)) + return true + } + return false +} +func rewriteValuegeneric_OpLess16(v *Value, config 
*Config) bool { + b := v.Block + _ = b + // match: (Less16 (Const16 [c]) (Const16 [d])) + // cond: + // result: (ConstBool [b2i(int16(c) < int16(d))]) + for { + if v.Args[0].Op != OpConst16 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst16 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int16(c) < int16(d)) + return true + } + return false +} +func rewriteValuegeneric_OpLess16U(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Less16U (Const16 [c]) (Const16 [d])) + // cond: + // result: (ConstBool [b2i(uint16(c) < uint16(d))]) + for { + if v.Args[0].Op != OpConst16 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst16 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(uint16(c) < uint16(d)) + return true + } + return false +} +func rewriteValuegeneric_OpLess32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Less32 (Const32 [c]) (Const32 [d])) + // cond: + // result: (ConstBool [b2i(int32(c) < int32(d))]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst32 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int32(c) < int32(d)) + return true + } + return false +} +func rewriteValuegeneric_OpLess32U(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Less32U (Const32 [c]) (Const32 [d])) + // cond: + // result: (ConstBool [b2i(uint32(c) < uint32(d))]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst32 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(uint32(c) < uint32(d)) + return true + } + return false +} +func rewriteValuegeneric_OpLess64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Less64 (Const64 [c]) (Const64 [d])) + // cond: + // result: (ConstBool [b2i(int64(c) < int64(d))]) + for { + if v.Args[0].Op != OpConst64 { + break + } 
+ c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int64(c) < int64(d)) + return true + } + return false +} +func rewriteValuegeneric_OpLess64U(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Less64U (Const64 [c]) (Const64 [d])) + // cond: + // result: (ConstBool [b2i(uint64(c) < uint64(d))]) + for { + if v.Args[0].Op != OpConst64 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(uint64(c) < uint64(d)) + return true + } + return false +} +func rewriteValuegeneric_OpLess8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Less8 (Const8 [c]) (Const8 [d])) + // cond: + // result: (ConstBool [b2i(int8(c) < int8(d))]) + for { + if v.Args[0].Op != OpConst8 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst8 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int8(c) < int8(d)) + return true + } + return false +} +func rewriteValuegeneric_OpLess8U(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Less8U (Const8 [c]) (Const8 [d])) + // cond: + // result: (ConstBool [b2i(uint8(c) < uint8(d))]) + for { + if v.Args[0].Op != OpConst8 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst8 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(uint8(c) < uint8(d)) + return true + } + return false +} +func rewriteValuegeneric_OpLoad(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Load <t1> p1 (Store [w] p2 x _)) + // cond: isSamePtr(p1,p2) && t1.Compare(x.Type)==CMPeq && w == t1.Size() + // result: x + for { + t1 := v.Type + p1 := v.Args[0] + if v.Args[1].Op != OpStore { + break + } + w := v.Args[1].AuxInt + p2 := v.Args[1].Args[0] + x := v.Args[1].Args[1] + if !(isSamePtr(p1, p2) && t1.Compare(x.Type) == CMPeq && w == t1.Size()) { + break + } + 
v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Load <t> _ _) + // cond: t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t) + // result: (StructMake0) + for { + t := v.Type + if !(t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)) { + break + } + v.reset(OpStructMake0) + return true + } + // match: (Load <t> ptr mem) + // cond: t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t) + // result: (StructMake1 (Load <t.FieldType(0)> ptr mem)) + for { + t := v.Type + ptr := v.Args[0] + mem := v.Args[1] + if !(t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)) { + break + } + v.reset(OpStructMake1) + v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0)) + v0.AddArg(ptr) + v0.AddArg(mem) + v.AddArg(v0) + return true + } + // match: (Load <t> ptr mem) + // cond: t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t) + // result: (StructMake2 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem)) + for { + t := v.Type + ptr := v.Args[0] + mem := v.Args[1] + if !(t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)) { + break + } + v.reset(OpStructMake2) + v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0)) + v0.AddArg(ptr) + v0.AddArg(mem) + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1)) + v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo()) + v2.AuxInt = t.FieldOff(1) + v2.AddArg(ptr) + v1.AddArg(v2) + v1.AddArg(mem) + v.AddArg(v1) + return true + } + // match: (Load <t> ptr mem) + // cond: t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t) + // result: (StructMake3 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem)) + for { + t := v.Type + ptr := v.Args[0] + mem := v.Args[1] + if !(t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)) { + break + } + v.reset(OpStructMake3) + 
v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0)) + v0.AddArg(ptr) + v0.AddArg(mem) + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1)) + v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo()) + v2.AuxInt = t.FieldOff(1) + v2.AddArg(ptr) + v1.AddArg(v2) + v1.AddArg(mem) + v.AddArg(v1) + v3 := b.NewValue0(v.Line, OpLoad, t.FieldType(2)) + v4 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo()) + v4.AuxInt = t.FieldOff(2) + v4.AddArg(ptr) + v3.AddArg(v4) + v3.AddArg(mem) + v.AddArg(v3) + return true + } + // match: (Load <t> ptr mem) + // cond: t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t) + // result: (StructMake4 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem) (Load <t.FieldType(3)> (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] ptr) mem)) + for { + t := v.Type + ptr := v.Args[0] + mem := v.Args[1] + if !(t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)) { + break + } + v.reset(OpStructMake4) + v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0)) + v0.AddArg(ptr) + v0.AddArg(mem) + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1)) + v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo()) + v2.AuxInt = t.FieldOff(1) + v2.AddArg(ptr) + v1.AddArg(v2) + v1.AddArg(mem) + v.AddArg(v1) + v3 := b.NewValue0(v.Line, OpLoad, t.FieldType(2)) + v4 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo()) + v4.AuxInt = t.FieldOff(2) + v4.AddArg(ptr) + v3.AddArg(v4) + v3.AddArg(mem) + v.AddArg(v3) + v5 := b.NewValue0(v.Line, OpLoad, t.FieldType(3)) + v6 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(3).PtrTo()) + v6.AuxInt = t.FieldOff(3) + v6.AddArg(ptr) + v5.AddArg(v6) + v5.AddArg(mem) + v.AddArg(v5) + return true + } + // match: (Load <t> ptr mem) + // cond: t.IsComplex() && t.Size() == 8 + // result: (ComplexMake (Load <config.fe.TypeFloat32()> ptr mem) (Load 
<config.fe.TypeFloat32()> (OffPtr <config.fe.TypeFloat32().PtrTo()> [4] ptr) mem) ) + for { + t := v.Type + ptr := v.Args[0] + mem := v.Args[1] + if !(t.IsComplex() && t.Size() == 8) { + break + } + v.reset(OpComplexMake) + v0 := b.NewValue0(v.Line, OpLoad, config.fe.TypeFloat32()) + v0.AddArg(ptr) + v0.AddArg(mem) + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpLoad, config.fe.TypeFloat32()) + v2 := b.NewValue0(v.Line, OpOffPtr, config.fe.TypeFloat32().PtrTo()) + v2.AuxInt = 4 + v2.AddArg(ptr) + v1.AddArg(v2) + v1.AddArg(mem) + v.AddArg(v1) + return true + } + // match: (Load <t> ptr mem) + // cond: t.IsComplex() && t.Size() == 16 + // result: (ComplexMake (Load <config.fe.TypeFloat64()> ptr mem) (Load <config.fe.TypeFloat64()> (OffPtr <config.fe.TypeFloat64().PtrTo()> [8] ptr) mem) ) + for { + t := v.Type + ptr := v.Args[0] + mem := v.Args[1] + if !(t.IsComplex() && t.Size() == 16) { + break + } + v.reset(OpComplexMake) + v0 := b.NewValue0(v.Line, OpLoad, config.fe.TypeFloat64()) + v0.AddArg(ptr) + v0.AddArg(mem) + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpLoad, config.fe.TypeFloat64()) + v2 := b.NewValue0(v.Line, OpOffPtr, config.fe.TypeFloat64().PtrTo()) + v2.AuxInt = 8 + v2.AddArg(ptr) + v1.AddArg(v2) + v1.AddArg(mem) + v.AddArg(v1) + return true + } + // match: (Load <t> ptr mem) + // cond: t.IsString() + // result: (StringMake (Load <config.fe.TypeBytePtr()> ptr mem) (Load <config.fe.TypeInt()> (OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] ptr) mem)) + for { + t := v.Type + ptr := v.Args[0] + mem := v.Args[1] + if !(t.IsString()) { + break + } + v.reset(OpStringMake) + v0 := b.NewValue0(v.Line, OpLoad, config.fe.TypeBytePtr()) + v0.AddArg(ptr) + v0.AddArg(mem) + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpLoad, config.fe.TypeInt()) + v2 := b.NewValue0(v.Line, OpOffPtr, config.fe.TypeInt().PtrTo()) + v2.AuxInt = config.PtrSize + v2.AddArg(ptr) + v1.AddArg(v2) + v1.AddArg(mem) + v.AddArg(v1) + return true + } + // match: (Load <t> ptr mem) + // cond: 
t.IsSlice() + // result: (SliceMake (Load <config.fe.TypeBytePtr()> ptr mem) (Load <config.fe.TypeInt()> (OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] ptr) mem) (Load <config.fe.TypeInt()> (OffPtr <config.fe.TypeInt().PtrTo()> [2*config.PtrSize] ptr) mem)) + for { + t := v.Type + ptr := v.Args[0] + mem := v.Args[1] + if !(t.IsSlice()) { + break + } + v.reset(OpSliceMake) + v0 := b.NewValue0(v.Line, OpLoad, config.fe.TypeBytePtr()) + v0.AddArg(ptr) + v0.AddArg(mem) + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpLoad, config.fe.TypeInt()) + v2 := b.NewValue0(v.Line, OpOffPtr, config.fe.TypeInt().PtrTo()) + v2.AuxInt = config.PtrSize + v2.AddArg(ptr) + v1.AddArg(v2) + v1.AddArg(mem) + v.AddArg(v1) + v3 := b.NewValue0(v.Line, OpLoad, config.fe.TypeInt()) + v4 := b.NewValue0(v.Line, OpOffPtr, config.fe.TypeInt().PtrTo()) + v4.AuxInt = 2 * config.PtrSize + v4.AddArg(ptr) + v3.AddArg(v4) + v3.AddArg(mem) + v.AddArg(v3) + return true + } + // match: (Load <t> ptr mem) + // cond: t.IsInterface() + // result: (IMake (Load <config.fe.TypeBytePtr()> ptr mem) (Load <config.fe.TypeBytePtr()> (OffPtr <config.fe.TypeBytePtr().PtrTo()> [config.PtrSize] ptr) mem)) + for { + t := v.Type + ptr := v.Args[0] + mem := v.Args[1] + if !(t.IsInterface()) { + break + } + v.reset(OpIMake) + v0 := b.NewValue0(v.Line, OpLoad, config.fe.TypeBytePtr()) + v0.AddArg(ptr) + v0.AddArg(mem) + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpLoad, config.fe.TypeBytePtr()) + v2 := b.NewValue0(v.Line, OpOffPtr, config.fe.TypeBytePtr().PtrTo()) + v2.AuxInt = config.PtrSize + v2.AddArg(ptr) + v1.AddArg(v2) + v1.AddArg(mem) + v.AddArg(v1) + return true + } + return false +} +func rewriteValuegeneric_OpLsh16x16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Lsh16x16 <t> x (Const16 [c])) + // cond: + // result: (Lsh16x64 x (Const64 <t> [int64(uint16(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst16 { + break + } + c := v.Args[1].AuxInt + v.reset(OpLsh16x64) + 
v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint16(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpLsh16x32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Lsh16x32 <t> x (Const32 [c])) + // cond: + // result: (Lsh16x64 x (Const64 <t> [int64(uint32(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst32 { + break + } + c := v.Args[1].AuxInt + v.reset(OpLsh16x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint32(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpLsh16x64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Lsh16x64 (Const16 [c]) (Const64 [d])) + // cond: + // result: (Const16 [int64(int16(c) << uint64(d))]) + for { + if v.Args[0].Op != OpConst16 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst16) + v.AuxInt = int64(int16(c) << uint64(d)) + return true + } + // match: (Lsh16x64 (Const16 [0]) _) + // cond: + // result: (Const16 [0]) + for { + if v.Args[0].Op != OpConst16 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst16) + v.AuxInt = 0 + return true + } + // match: (Lsh16x64 x (Const64 [0])) + // cond: + // result: x + for { + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + if v.Args[1].AuxInt != 0 { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Lsh16x64 _ (Const64 [c])) + // cond: uint64(c) >= 16 + // result: (Const16 [0]) + for { + if v.Args[1].Op != OpConst64 { + break + } + c := v.Args[1].AuxInt + if !(uint64(c) >= 16) { + break + } + v.reset(OpConst16) + v.AuxInt = 0 + return true + } + // match: (Lsh16x64 <t> (Lsh16x64 x (Const64 [c])) (Const64 [d])) + // cond: !uaddOvf(c,d) + // result: (Lsh16x64 x (Const64 <t> [c+d])) + for { + t := v.Type + if v.Args[0].Op != OpLsh16x64 { + break + } + x := 
v.Args[0].Args[0] + if v.Args[0].Args[1].Op != OpConst64 { + break + } + c := v.Args[0].Args[1].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + if !(!uaddOvf(c, d)) { + break + } + v.reset(OpLsh16x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = c + d + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpLsh16x8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Lsh16x8 <t> x (Const8 [c])) + // cond: + // result: (Lsh16x64 x (Const64 <t> [int64(uint8(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst8 { + break + } + c := v.Args[1].AuxInt + v.reset(OpLsh16x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint8(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpLsh32x16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Lsh32x16 <t> x (Const16 [c])) + // cond: + // result: (Lsh32x64 x (Const64 <t> [int64(uint16(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst16 { + break + } + c := v.Args[1].AuxInt + v.reset(OpLsh32x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint16(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpLsh32x32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Lsh32x32 <t> x (Const32 [c])) + // cond: + // result: (Lsh32x64 x (Const64 <t> [int64(uint32(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst32 { + break + } + c := v.Args[1].AuxInt + v.reset(OpLsh32x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint32(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpLsh32x64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Lsh32x64 (Const32 [c]) (Const64 [d])) + // cond: + // result: (Const32 [int64(int32(c) << uint64(d))]) + for { + if 
v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst32) + v.AuxInt = int64(int32(c) << uint64(d)) + return true + } + // match: (Lsh32x64 (Const32 [0]) _) + // cond: + // result: (Const32 [0]) + for { + if v.Args[0].Op != OpConst32 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst32) + v.AuxInt = 0 + return true + } + // match: (Lsh32x64 x (Const64 [0])) + // cond: + // result: x + for { + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + if v.Args[1].AuxInt != 0 { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Lsh32x64 _ (Const64 [c])) + // cond: uint64(c) >= 32 + // result: (Const32 [0]) + for { + if v.Args[1].Op != OpConst64 { + break + } + c := v.Args[1].AuxInt + if !(uint64(c) >= 32) { + break + } + v.reset(OpConst32) + v.AuxInt = 0 + return true + } + // match: (Lsh32x64 <t> (Lsh32x64 x (Const64 [c])) (Const64 [d])) + // cond: !uaddOvf(c,d) + // result: (Lsh32x64 x (Const64 <t> [c+d])) + for { + t := v.Type + if v.Args[0].Op != OpLsh32x64 { + break + } + x := v.Args[0].Args[0] + if v.Args[0].Args[1].Op != OpConst64 { + break + } + c := v.Args[0].Args[1].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + if !(!uaddOvf(c, d)) { + break + } + v.reset(OpLsh32x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = c + d + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpLsh32x8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Lsh32x8 <t> x (Const8 [c])) + // cond: + // result: (Lsh32x64 x (Const64 <t> [int64(uint8(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst8 { + break + } + c := v.Args[1].AuxInt + v.reset(OpLsh32x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint8(c)) + v.AddArg(v0) + return true + } + return false +} +func 
rewriteValuegeneric_OpLsh64x16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Lsh64x16 <t> x (Const16 [c])) + // cond: + // result: (Lsh64x64 x (Const64 <t> [int64(uint16(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst16 { + break + } + c := v.Args[1].AuxInt + v.reset(OpLsh64x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint16(c)) + v.AddArg(v0) + return true + } + // match: (Lsh64x16 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } + return false +} +func rewriteValuegeneric_OpLsh64x32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Lsh64x32 <t> x (Const32 [c])) + // cond: + // result: (Lsh64x64 x (Const64 <t> [int64(uint32(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst32 { + break + } + c := v.Args[1].AuxInt + v.reset(OpLsh64x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint32(c)) + v.AddArg(v0) + return true + } + // match: (Lsh64x32 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } + return false +} +func rewriteValuegeneric_OpLsh64x64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Lsh64x64 (Const64 [c]) (Const64 [d])) + // cond: + // result: (Const64 [c << uint64(d)]) + for { + if v.Args[0].Op != OpConst64 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst64) + v.AuxInt = c << uint64(d) + return true + } + // match: (Lsh64x64 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst64) + 
v.AuxInt = 0 + return true + } + // match: (Lsh64x64 x (Const64 [0])) + // cond: + // result: x + for { + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + if v.Args[1].AuxInt != 0 { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Lsh64x64 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } + // match: (Lsh64x64 _ (Const64 [c])) + // cond: uint64(c) >= 64 + // result: (Const64 [0]) + for { + if v.Args[1].Op != OpConst64 { + break + } + c := v.Args[1].AuxInt + if !(uint64(c) >= 64) { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } + // match: (Lsh64x64 <t> (Lsh64x64 x (Const64 [c])) (Const64 [d])) + // cond: !uaddOvf(c,d) + // result: (Lsh64x64 x (Const64 <t> [c+d])) + for { + t := v.Type + if v.Args[0].Op != OpLsh64x64 { + break + } + x := v.Args[0].Args[0] + if v.Args[0].Args[1].Op != OpConst64 { + break + } + c := v.Args[0].Args[1].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + if !(!uaddOvf(c, d)) { + break + } + v.reset(OpLsh64x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = c + d + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpLsh64x8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Lsh64x8 <t> x (Const8 [c])) + // cond: + // result: (Lsh64x64 x (Const64 <t> [int64(uint8(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst8 { + break + } + c := v.Args[1].AuxInt + v.reset(OpLsh64x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint8(c)) + v.AddArg(v0) + return true + } + // match: (Lsh64x8 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return 
true + } + return false +} +func rewriteValuegeneric_OpLsh8x16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Lsh8x16 <t> x (Const16 [c])) + // cond: + // result: (Lsh8x64 x (Const64 <t> [int64(uint16(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst16 { + break + } + c := v.Args[1].AuxInt + v.reset(OpLsh8x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint16(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpLsh8x32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Lsh8x32 <t> x (Const32 [c])) + // cond: + // result: (Lsh8x64 x (Const64 <t> [int64(uint32(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst32 { + break + } + c := v.Args[1].AuxInt + v.reset(OpLsh8x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint32(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpLsh8x64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Lsh8x64 (Const8 [c]) (Const64 [d])) + // cond: + // result: (Const8 [int64(int8(c) << uint64(d))]) + for { + if v.Args[0].Op != OpConst8 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst8) + v.AuxInt = int64(int8(c) << uint64(d)) + return true + } + // match: (Lsh8x64 (Const8 [0]) _) + // cond: + // result: (Const8 [0]) + for { + if v.Args[0].Op != OpConst8 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst8) + v.AuxInt = 0 + return true + } + // match: (Lsh8x64 x (Const64 [0])) + // cond: + // result: x + for { + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + if v.Args[1].AuxInt != 0 { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Lsh8x64 _ (Const64 [c])) + // cond: uint64(c) >= 8 + // result: (Const8 [0]) + for { + if v.Args[1].Op != OpConst64 { + break + 
} + c := v.Args[1].AuxInt + if !(uint64(c) >= 8) { + break + } + v.reset(OpConst8) + v.AuxInt = 0 + return true + } + // match: (Lsh8x64 <t> (Lsh8x64 x (Const64 [c])) (Const64 [d])) + // cond: !uaddOvf(c,d) + // result: (Lsh8x64 x (Const64 <t> [c+d])) + for { + t := v.Type + if v.Args[0].Op != OpLsh8x64 { + break + } + x := v.Args[0].Args[0] + if v.Args[0].Args[1].Op != OpConst64 { + break + } + c := v.Args[0].Args[1].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + if !(!uaddOvf(c, d)) { + break + } + v.reset(OpLsh8x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = c + d + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpLsh8x8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Lsh8x8 <t> x (Const8 [c])) + // cond: + // result: (Lsh8x64 x (Const64 <t> [int64(uint8(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst8 { + break + } + c := v.Args[1].AuxInt + v.reset(OpLsh8x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint8(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpMod64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Mod64 <t> x (Const64 [c])) + // cond: smagic64ok(c) + // result: (Sub64 x (Mul64 <t> (Div64 <t> x (Const64 <t> [c])) (Const64 <t> [c]))) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + c := v.Args[1].AuxInt + if !(smagic64ok(c)) { + break + } + v.reset(OpSub64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpMul64, t) + v1 := b.NewValue0(v.Line, OpDiv64, t) + v1.AddArg(x) + v2 := b.NewValue0(v.Line, OpConst64, t) + v2.AuxInt = c + v1.AddArg(v2) + v0.AddArg(v1) + v3 := b.NewValue0(v.Line, OpConst64, t) + v3.AuxInt = c + v0.AddArg(v3) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpMod64u(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Mod64u <t> x (Const64 
[c])) + // cond: umagic64ok(c) + // result: (Sub64 x (Mul64 <t> (Div64u <t> x (Const64 <t> [c])) (Const64 <t> [c]))) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + c := v.Args[1].AuxInt + if !(umagic64ok(c)) { + break + } + v.reset(OpSub64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpMul64, t) + v1 := b.NewValue0(v.Line, OpDiv64u, t) + v1.AddArg(x) + v2 := b.NewValue0(v.Line, OpConst64, t) + v2.AuxInt = c + v1.AddArg(v2) + v0.AddArg(v1) + v3 := b.NewValue0(v.Line, OpConst64, t) + v3.AuxInt = c + v0.AddArg(v3) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpMul16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Mul16 (Const16 [c]) (Const16 [d])) + // cond: + // result: (Const16 [c*d]) + for { + if v.Args[0].Op != OpConst16 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst16 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst16) + v.AuxInt = c * d + return true + } + // match: (Mul16 x (Const16 <t> [c])) + // cond: x.Op != OpConst16 + // result: (Mul16 (Const16 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst16 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst16) { + break + } + v.reset(OpMul16) + v0 := b.NewValue0(v.Line, OpConst16, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Mul16 (Const16 [0]) _) + // cond: + // result: (Const16 [0]) + for { + if v.Args[0].Op != OpConst16 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst16) + v.AuxInt = 0 + return true + } + return false +} +func rewriteValuegeneric_OpMul32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Mul32 (Const32 [c]) (Const32 [d])) + // cond: + // result: (Const32 [c*d]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst32 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst32) + v.AuxInt = c * d + return true + } 
+ // match: (Mul32 x (Const32 <t> [c])) + // cond: x.Op != OpConst32 + // result: (Mul32 (Const32 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst32 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst32) { + break + } + v.reset(OpMul32) + v0 := b.NewValue0(v.Line, OpConst32, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Mul32 (Const32 <t> [c]) (Add32 <t> (Const32 <t> [d]) x)) + // cond: + // result: (Add32 (Const32 <t> [c*d]) (Mul32 <t> (Const32 <t> [c]) x)) + for { + if v.Args[0].Op != OpConst32 { + break + } + t := v.Args[0].Type + c := v.Args[0].AuxInt + if v.Args[1].Op != OpAdd32 { + break + } + if v.Args[1].Type != v.Args[0].Type { + break + } + if v.Args[1].Args[0].Op != OpConst32 { + break + } + if v.Args[1].Args[0].Type != v.Args[0].Type { + break + } + d := v.Args[1].Args[0].AuxInt + x := v.Args[1].Args[1] + v.reset(OpAdd32) + v0 := b.NewValue0(v.Line, OpConst32, t) + v0.AuxInt = c * d + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpMul32, t) + v2 := b.NewValue0(v.Line, OpConst32, t) + v2.AuxInt = c + v1.AddArg(v2) + v1.AddArg(x) + v.AddArg(v1) + return true + } + // match: (Mul32 (Const32 [0]) _) + // cond: + // result: (Const32 [0]) + for { + if v.Args[0].Op != OpConst32 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst32) + v.AuxInt = 0 + return true + } + return false +} +func rewriteValuegeneric_OpMul64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Mul64 (Const64 [c]) (Const64 [d])) + // cond: + // result: (Const64 [c*d]) + for { + if v.Args[0].Op != OpConst64 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst64) + v.AuxInt = c * d + return true + } + // match: (Mul64 x (Const64 <t> [c])) + // cond: x.Op != OpConst64 + // result: (Mul64 (Const64 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + t := v.Args[1].Type + c := 
v.Args[1].AuxInt + if !(x.Op != OpConst64) { + break + } + v.reset(OpMul64) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Mul64 (Const64 <t> [c]) (Add64 <t> (Const64 <t> [d]) x)) + // cond: + // result: (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x)) + for { + if v.Args[0].Op != OpConst64 { + break + } + t := v.Args[0].Type + c := v.Args[0].AuxInt + if v.Args[1].Op != OpAdd64 { + break + } + if v.Args[1].Type != v.Args[0].Type { + break + } + if v.Args[1].Args[0].Op != OpConst64 { + break + } + if v.Args[1].Args[0].Type != v.Args[0].Type { + break + } + d := v.Args[1].Args[0].AuxInt + x := v.Args[1].Args[1] + v.reset(OpAdd64) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = c * d + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpMul64, t) + v2 := b.NewValue0(v.Line, OpConst64, t) + v2.AuxInt = c + v1.AddArg(v2) + v1.AddArg(x) + v.AddArg(v1) + return true + } + // match: (Mul64 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } + return false +} +func rewriteValuegeneric_OpMul8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Mul8 (Const8 [c]) (Const8 [d])) + // cond: + // result: (Const8 [c*d]) + for { + if v.Args[0].Op != OpConst8 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst8 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst8) + v.AuxInt = c * d + return true + } + // match: (Mul8 x (Const8 <t> [c])) + // cond: x.Op != OpConst8 + // result: (Mul8 (Const8 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst8 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst8) { + break + } + v.reset(OpMul8) + v0 := b.NewValue0(v.Line, OpConst8, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Mul8 (Const8 [0]) _) + // cond: + 
// result: (Const8 [0]) + for { + if v.Args[0].Op != OpConst8 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst8) + v.AuxInt = 0 + return true + } + return false +} +func rewriteValuegeneric_OpNeg16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Neg16 (Const16 [c])) + // cond: + // result: (Const16 [-c]) + for { + if v.Args[0].Op != OpConst16 { + break + } + c := v.Args[0].AuxInt + v.reset(OpConst16) + v.AuxInt = -c + return true + } + // match: (Neg16 (Sub16 x y)) + // cond: + // result: (Sub16 y x) + for { + if v.Args[0].Op != OpSub16 { + break + } + x := v.Args[0].Args[0] + y := v.Args[0].Args[1] + v.reset(OpSub16) + v.AddArg(y) + v.AddArg(x) + return true + } + return false +} +func rewriteValuegeneric_OpNeg32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Neg32 (Const32 [c])) + // cond: + // result: (Const32 [-c]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + v.reset(OpConst32) + v.AuxInt = -c + return true + } + // match: (Neg32 (Sub32 x y)) + // cond: + // result: (Sub32 y x) + for { + if v.Args[0].Op != OpSub32 { + break + } + x := v.Args[0].Args[0] + y := v.Args[0].Args[1] + v.reset(OpSub32) + v.AddArg(y) + v.AddArg(x) + return true + } + return false +} +func rewriteValuegeneric_OpNeg64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Neg64 (Const64 [c])) + // cond: + // result: (Const64 [-c]) + for { + if v.Args[0].Op != OpConst64 { + break + } + c := v.Args[0].AuxInt + v.reset(OpConst64) + v.AuxInt = -c + return true + } + // match: (Neg64 (Sub64 x y)) + // cond: + // result: (Sub64 y x) + for { + if v.Args[0].Op != OpSub64 { + break + } + x := v.Args[0].Args[0] + y := v.Args[0].Args[1] + v.reset(OpSub64) + v.AddArg(y) + v.AddArg(x) + return true + } + return false +} +func rewriteValuegeneric_OpNeg8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Neg8 (Const8 [c])) + // cond: + // result: (Const8 [-c]) + for { + 
if v.Args[0].Op != OpConst8 { + break + } + c := v.Args[0].AuxInt + v.reset(OpConst8) + v.AuxInt = -c + return true + } + // match: (Neg8 (Sub8 x y)) + // cond: + // result: (Sub8 y x) + for { + if v.Args[0].Op != OpSub8 { + break + } + x := v.Args[0].Args[0] + y := v.Args[0].Args[1] + v.reset(OpSub8) + v.AddArg(y) + v.AddArg(x) + return true + } + return false +} +func rewriteValuegeneric_OpNeq16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Neq16 x x) + // cond: + // result: (ConstBool [0]) + for { + x := v.Args[0] + if v.Args[1] != x { + break + } + v.reset(OpConstBool) + v.AuxInt = 0 + return true + } + // match: (Neq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x)) + // cond: + // result: (Neq16 (Const16 <t> [c-d]) x) + for { + if v.Args[0].Op != OpConst16 { + break + } + t := v.Args[0].Type + c := v.Args[0].AuxInt + if v.Args[1].Op != OpAdd16 { + break + } + if v.Args[1].Args[0].Op != OpConst16 { + break + } + if v.Args[1].Args[0].Type != v.Args[0].Type { + break + } + d := v.Args[1].Args[0].AuxInt + x := v.Args[1].Args[1] + v.reset(OpNeq16) + v0 := b.NewValue0(v.Line, OpConst16, t) + v0.AuxInt = c - d + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Neq16 x (Const16 <t> [c])) + // cond: x.Op != OpConst16 + // result: (Neq16 (Const16 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst16 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst16) { + break + } + v.reset(OpNeq16) + v0 := b.NewValue0(v.Line, OpConst16, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Neq16 (Const16 [c]) (Const16 [d])) + // cond: + // result: (ConstBool [b2i(int16(c) != int16(d))]) + for { + if v.Args[0].Op != OpConst16 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst16 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int16(c) != int16(d)) + return true + } + return false +} +func rewriteValuegeneric_OpNeq32(v *Value, config 
*Config) bool { + b := v.Block + _ = b + // match: (Neq32 x x) + // cond: + // result: (ConstBool [0]) + for { + x := v.Args[0] + if v.Args[1] != x { + break + } + v.reset(OpConstBool) + v.AuxInt = 0 + return true + } + // match: (Neq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x)) + // cond: + // result: (Neq32 (Const32 <t> [c-d]) x) + for { + if v.Args[0].Op != OpConst32 { + break + } + t := v.Args[0].Type + c := v.Args[0].AuxInt + if v.Args[1].Op != OpAdd32 { + break + } + if v.Args[1].Args[0].Op != OpConst32 { + break + } + if v.Args[1].Args[0].Type != v.Args[0].Type { + break + } + d := v.Args[1].Args[0].AuxInt + x := v.Args[1].Args[1] + v.reset(OpNeq32) + v0 := b.NewValue0(v.Line, OpConst32, t) + v0.AuxInt = c - d + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Neq32 x (Const32 <t> [c])) + // cond: x.Op != OpConst32 + // result: (Neq32 (Const32 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst32 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst32) { + break + } + v.reset(OpNeq32) + v0 := b.NewValue0(v.Line, OpConst32, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Neq32 (Const32 [c]) (Const32 [d])) + // cond: + // result: (ConstBool [b2i(int32(c) != int32(d))]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst32 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int32(c) != int32(d)) + return true + } + return false +} +func rewriteValuegeneric_OpNeq64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Neq64 x x) + // cond: + // result: (ConstBool [0]) + for { + x := v.Args[0] + if v.Args[1] != x { + break + } + v.reset(OpConstBool) + v.AuxInt = 0 + return true + } + // match: (Neq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x)) + // cond: + // result: (Neq64 (Const64 <t> [c-d]) x) + for { + if v.Args[0].Op != OpConst64 { + break + } + t := v.Args[0].Type + c := 
v.Args[0].AuxInt + if v.Args[1].Op != OpAdd64 { + break + } + if v.Args[1].Args[0].Op != OpConst64 { + break + } + if v.Args[1].Args[0].Type != v.Args[0].Type { + break + } + d := v.Args[1].Args[0].AuxInt + x := v.Args[1].Args[1] + v.reset(OpNeq64) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = c - d + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Neq64 x (Const64 <t> [c])) + // cond: x.Op != OpConst64 + // result: (Neq64 (Const64 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst64) { + break + } + v.reset(OpNeq64) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Neq64 (Const64 [c]) (Const64 [d])) + // cond: + // result: (ConstBool [b2i(int64(c) != int64(d))]) + for { + if v.Args[0].Op != OpConst64 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int64(c) != int64(d)) + return true + } + return false +} +func rewriteValuegeneric_OpNeq8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Neq8 x x) + // cond: + // result: (ConstBool [0]) + for { + x := v.Args[0] + if v.Args[1] != x { + break + } + v.reset(OpConstBool) + v.AuxInt = 0 + return true + } + // match: (Neq8 (ConstBool [c]) (ConstBool [d])) + // cond: + // result: (ConstBool [b2i((int8(c) != 0) != (int8(d) != 0))]) + for { + if v.Args[0].Op != OpConstBool { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConstBool { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i((int8(c) != 0) != (int8(d) != 0)) + return true + } + // match: (Neq8 (ConstBool [0]) x) + // cond: + // result: x + for { + if v.Args[0].Op != OpConstBool { + break + } + if v.Args[0].AuxInt != 0 { + break + } + x := v.Args[1] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: 
(Neq8 (ConstBool [1]) x) + // cond: + // result: (Not x) + for { + if v.Args[0].Op != OpConstBool { + break + } + if v.Args[0].AuxInt != 1 { + break + } + x := v.Args[1] + v.reset(OpNot) + v.AddArg(x) + return true + } + // match: (Neq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x)) + // cond: + // result: (Neq8 (Const8 <t> [c-d]) x) + for { + if v.Args[0].Op != OpConst8 { + break + } + t := v.Args[0].Type + c := v.Args[0].AuxInt + if v.Args[1].Op != OpAdd8 { + break + } + if v.Args[1].Args[0].Op != OpConst8 { + break + } + if v.Args[1].Args[0].Type != v.Args[0].Type { + break + } + d := v.Args[1].Args[0].AuxInt + x := v.Args[1].Args[1] + v.reset(OpNeq8) + v0 := b.NewValue0(v.Line, OpConst8, t) + v0.AuxInt = c - d + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Neq8 x (Const8 <t> [c])) + // cond: x.Op != OpConst8 + // result: (Neq8 (Const8 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst8 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst8) { + break + } + v.reset(OpNeq8) + v0 := b.NewValue0(v.Line, OpConst8, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Neq8 x (ConstBool <t> [c])) + // cond: x.Op != OpConstBool + // result: (Neq8 (ConstBool <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConstBool { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConstBool) { + break + } + v.reset(OpNeq8) + v0 := b.NewValue0(v.Line, OpConstBool, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Neq8 (Const8 [c]) (Const8 [d])) + // cond: + // result: (ConstBool [b2i(int8(c) != int8(d))]) + for { + if v.Args[0].Op != OpConst8 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst8 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConstBool) + v.AuxInt = b2i(int8(c) != int8(d)) + return true + } + return false +} +func rewriteValuegeneric_OpNeqInter(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: 
(NeqInter x y) + // cond: + // result: (NeqPtr (ITab x) (ITab y)) + for { + x := v.Args[0] + y := v.Args[1] + v.reset(OpNeqPtr) + v0 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr()) + v0.AddArg(x) + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr()) + v1.AddArg(y) + v.AddArg(v1) + return true + } + return false +} +func rewriteValuegeneric_OpNeqPtr(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (NeqPtr p (ConstNil)) + // cond: + // result: (IsNonNil p) + for { + p := v.Args[0] + if v.Args[1].Op != OpConstNil { + break + } + v.reset(OpIsNonNil) + v.AddArg(p) + return true + } + // match: (NeqPtr (ConstNil) p) + // cond: + // result: (IsNonNil p) + for { + if v.Args[0].Op != OpConstNil { + break + } + p := v.Args[1] + v.reset(OpIsNonNil) + v.AddArg(p) + return true + } + return false +} +func rewriteValuegeneric_OpNeqSlice(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (NeqSlice x y) + // cond: + // result: (NeqPtr (SlicePtr x) (SlicePtr y)) + for { + x := v.Args[0] + y := v.Args[1] + v.reset(OpNeqPtr) + v0 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr()) + v0.AddArg(x) + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr()) + v1.AddArg(y) + v.AddArg(v1) + return true + } + return false +} +func rewriteValuegeneric_OpOr16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Or16 x (Const16 <t> [c])) + // cond: x.Op != OpConst16 + // result: (Or16 (Const16 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst16 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst16) { + break + } + v.reset(OpOr16) + v0 := b.NewValue0(v.Line, OpConst16, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Or16 x x) + // cond: + // result: x + for { + x := v.Args[0] + if v.Args[1] != x { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Or16 (Const16 [0]) 
x) + // cond: + // result: x + for { + if v.Args[0].Op != OpConst16 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + x := v.Args[1] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Or16 (Const16 [-1]) _) + // cond: + // result: (Const16 [-1]) + for { + if v.Args[0].Op != OpConst16 { + break + } + if v.Args[0].AuxInt != -1 { + break + } + v.reset(OpConst16) + v.AuxInt = -1 + return true + } + return false +} +func rewriteValuegeneric_OpOr32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Or32 x (Const32 <t> [c])) + // cond: x.Op != OpConst32 + // result: (Or32 (Const32 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst32 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst32) { + break + } + v.reset(OpOr32) + v0 := b.NewValue0(v.Line, OpConst32, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Or32 x x) + // cond: + // result: x + for { + x := v.Args[0] + if v.Args[1] != x { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Or32 (Const32 [0]) x) + // cond: + // result: x + for { + if v.Args[0].Op != OpConst32 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + x := v.Args[1] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Or32 (Const32 [-1]) _) + // cond: + // result: (Const32 [-1]) + for { + if v.Args[0].Op != OpConst32 { + break + } + if v.Args[0].AuxInt != -1 { + break + } + v.reset(OpConst32) + v.AuxInt = -1 + return true + } + return false +} +func rewriteValuegeneric_OpOr64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Or64 x (Const64 <t> [c])) + // cond: x.Op != OpConst64 + // result: (Or64 (Const64 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst64) { + break + } + v.reset(OpOr64) + v0 := b.NewValue0(v.Line, OpConst64, t) 
+ v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Or64 x x) + // cond: + // result: x + for { + x := v.Args[0] + if v.Args[1] != x { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Or64 (Const64 [0]) x) + // cond: + // result: x + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + x := v.Args[1] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Or64 (Const64 [-1]) _) + // cond: + // result: (Const64 [-1]) + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != -1 { + break + } + v.reset(OpConst64) + v.AuxInt = -1 + return true + } + return false +} +func rewriteValuegeneric_OpOr8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Or8 x (Const8 <t> [c])) + // cond: x.Op != OpConst8 + // result: (Or8 (Const8 <t> [c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst8 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst8) { + break + } + v.reset(OpOr8) + v0 := b.NewValue0(v.Line, OpConst8, t) + v0.AuxInt = c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Or8 x x) + // cond: + // result: x + for { + x := v.Args[0] + if v.Args[1] != x { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Or8 (Const8 [0]) x) + // cond: + // result: x + for { + if v.Args[0].Op != OpConst8 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + x := v.Args[1] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Or8 (Const8 [-1]) _) + // cond: + // result: (Const8 [-1]) + for { + if v.Args[0].Op != OpConst8 { + break + } + if v.Args[0].AuxInt != -1 { + break + } + v.reset(OpConst8) + v.AuxInt = -1 + return true + } + return false +} +func rewriteValuegeneric_OpPhi(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Phi (Const8 [c]) (Const8 [d])) + // cond: int8(c) == int8(d) 
+ // result: (Const8 [c]) + for { + if v.Args[0].Op != OpConst8 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst8 { + break + } + d := v.Args[1].AuxInt + if len(v.Args) != 2 { + break + } + if !(int8(c) == int8(d)) { + break + } + v.reset(OpConst8) + v.AuxInt = c + return true + } + // match: (Phi (Const16 [c]) (Const16 [d])) + // cond: int16(c) == int16(d) + // result: (Const16 [c]) + for { + if v.Args[0].Op != OpConst16 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst16 { + break + } + d := v.Args[1].AuxInt + if len(v.Args) != 2 { + break + } + if !(int16(c) == int16(d)) { + break + } + v.reset(OpConst16) + v.AuxInt = c + return true + } + // match: (Phi (Const32 [c]) (Const32 [d])) + // cond: int32(c) == int32(d) + // result: (Const32 [c]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst32 { + break + } + d := v.Args[1].AuxInt + if len(v.Args) != 2 { + break + } + if !(int32(c) == int32(d)) { + break + } + v.reset(OpConst32) + v.AuxInt = c + return true + } + // match: (Phi (Const64 [c]) (Const64 [c])) + // cond: + // result: (Const64 [c]) + for { + if v.Args[0].Op != OpConst64 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + if v.Args[1].AuxInt != v.Args[0].AuxInt { + break + } + if len(v.Args) != 2 { + break + } + v.reset(OpConst64) + v.AuxInt = c + return true + } + return false +} +func rewriteValuegeneric_OpPtrIndex(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (PtrIndex <t> ptr idx) + // cond: config.PtrSize == 4 + // result: (AddPtr ptr (Mul32 <config.fe.TypeInt()> idx (Const32 <config.fe.TypeInt()> [t.Elem().Size()]))) + for { + t := v.Type + ptr := v.Args[0] + idx := v.Args[1] + if !(config.PtrSize == 4) { + break + } + v.reset(OpAddPtr) + v.AddArg(ptr) + v0 := b.NewValue0(v.Line, OpMul32, config.fe.TypeInt()) + v0.AddArg(idx) + v1 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt()) + v1.AuxInt = 
t.Elem().Size() + v0.AddArg(v1) + v.AddArg(v0) + return true + } + // match: (PtrIndex <t> ptr idx) + // cond: config.PtrSize == 8 + // result: (AddPtr ptr (Mul64 <config.fe.TypeInt()> idx (Const64 <config.fe.TypeInt()> [t.Elem().Size()]))) + for { + t := v.Type + ptr := v.Args[0] + idx := v.Args[1] + if !(config.PtrSize == 8) { + break + } + v.reset(OpAddPtr) + v.AddArg(ptr) + v0 := b.NewValue0(v.Line, OpMul64, config.fe.TypeInt()) + v0.AddArg(idx) + v1 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt()) + v1.AuxInt = t.Elem().Size() + v0.AddArg(v1) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh16Ux16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh16Ux16 <t> x (Const16 [c])) + // cond: + // result: (Rsh16Ux64 x (Const64 <t> [int64(uint16(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst16 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh16Ux64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint16(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh16Ux32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh16Ux32 <t> x (Const32 [c])) + // cond: + // result: (Rsh16Ux64 x (Const64 <t> [int64(uint32(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst32 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh16Ux64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint32(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh16Ux64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh16Ux64 (Const16 [c]) (Const64 [d])) + // cond: + // result: (Const16 [int64(uint16(c) >> uint64(d))]) + for { + if v.Args[0].Op != OpConst16 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst16) + v.AuxInt = int64(uint16(c) >> uint64(d)) + return 
true + } + // match: (Rsh16Ux64 (Const16 [0]) _) + // cond: + // result: (Const16 [0]) + for { + if v.Args[0].Op != OpConst16 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst16) + v.AuxInt = 0 + return true + } + // match: (Rsh16Ux64 x (Const64 [0])) + // cond: + // result: x + for { + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + if v.Args[1].AuxInt != 0 { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Rsh16Ux64 _ (Const64 [c])) + // cond: uint64(c) >= 16 + // result: (Const16 [0]) + for { + if v.Args[1].Op != OpConst64 { + break + } + c := v.Args[1].AuxInt + if !(uint64(c) >= 16) { + break + } + v.reset(OpConst16) + v.AuxInt = 0 + return true + } + // match: (Rsh16Ux64 <t> (Rsh16Ux64 x (Const64 [c])) (Const64 [d])) + // cond: !uaddOvf(c,d) + // result: (Rsh16Ux64 x (Const64 <t> [c+d])) + for { + t := v.Type + if v.Args[0].Op != OpRsh16Ux64 { + break + } + x := v.Args[0].Args[0] + if v.Args[0].Args[1].Op != OpConst64 { + break + } + c := v.Args[0].Args[1].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + if !(!uaddOvf(c, d)) { + break + } + v.reset(OpRsh16Ux64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = c + d + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh16Ux8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh16Ux8 <t> x (Const8 [c])) + // cond: + // result: (Rsh16Ux64 x (Const64 <t> [int64(uint8(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst8 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh16Ux64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint8(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh16x16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh16x16 <t> x (Const16 [c])) + // cond: + // result: (Rsh16x64 x (Const64 <t> [int64(uint16(c))])) + 
for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst16 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh16x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint16(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh16x32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh16x32 <t> x (Const32 [c])) + // cond: + // result: (Rsh16x64 x (Const64 <t> [int64(uint32(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst32 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh16x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint32(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh16x64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh16x64 (Const16 [c]) (Const64 [d])) + // cond: + // result: (Const16 [int64(int16(c) >> uint64(d))]) + for { + if v.Args[0].Op != OpConst16 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst16) + v.AuxInt = int64(int16(c) >> uint64(d)) + return true + } + // match: (Rsh16x64 (Const16 [0]) _) + // cond: + // result: (Const16 [0]) + for { + if v.Args[0].Op != OpConst16 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst16) + v.AuxInt = 0 + return true + } + // match: (Rsh16x64 x (Const64 [0])) + // cond: + // result: x + for { + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + if v.Args[1].AuxInt != 0 { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Rsh16x64 <t> (Rsh16x64 x (Const64 [c])) (Const64 [d])) + // cond: !uaddOvf(c,d) + // result: (Rsh16x64 x (Const64 <t> [c+d])) + for { + t := v.Type + if v.Args[0].Op != OpRsh16x64 { + break + } + x := v.Args[0].Args[0] + if v.Args[0].Args[1].Op != OpConst64 { + break + } + c := v.Args[0].Args[1].AuxInt + if v.Args[1].Op != OpConst64 { + 
break + } + d := v.Args[1].AuxInt + if !(!uaddOvf(c, d)) { + break + } + v.reset(OpRsh16x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = c + d + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh16x8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh16x8 <t> x (Const8 [c])) + // cond: + // result: (Rsh16x64 x (Const64 <t> [int64(uint8(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst8 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh16x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint8(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh32Ux16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh32Ux16 <t> x (Const16 [c])) + // cond: + // result: (Rsh32Ux64 x (Const64 <t> [int64(uint16(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst16 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh32Ux64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint16(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh32Ux32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh32Ux32 <t> x (Const32 [c])) + // cond: + // result: (Rsh32Ux64 x (Const64 <t> [int64(uint32(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst32 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh32Ux64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint32(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh32Ux64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh32Ux64 (Const32 [c]) (Const64 [d])) + // cond: + // result: (Const32 [int64(uint32(c) >> uint64(d))]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := 
v.Args[1].AuxInt + v.reset(OpConst32) + v.AuxInt = int64(uint32(c) >> uint64(d)) + return true + } + // match: (Rsh32Ux64 (Const32 [0]) _) + // cond: + // result: (Const32 [0]) + for { + if v.Args[0].Op != OpConst32 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst32) + v.AuxInt = 0 + return true + } + // match: (Rsh32Ux64 x (Const64 [0])) + // cond: + // result: x + for { + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + if v.Args[1].AuxInt != 0 { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Rsh32Ux64 _ (Const64 [c])) + // cond: uint64(c) >= 32 + // result: (Const32 [0]) + for { + if v.Args[1].Op != OpConst64 { + break + } + c := v.Args[1].AuxInt + if !(uint64(c) >= 32) { + break + } + v.reset(OpConst32) + v.AuxInt = 0 + return true + } + // match: (Rsh32Ux64 <t> (Rsh32Ux64 x (Const64 [c])) (Const64 [d])) + // cond: !uaddOvf(c,d) + // result: (Rsh32Ux64 x (Const64 <t> [c+d])) + for { + t := v.Type + if v.Args[0].Op != OpRsh32Ux64 { + break + } + x := v.Args[0].Args[0] + if v.Args[0].Args[1].Op != OpConst64 { + break + } + c := v.Args[0].Args[1].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + if !(!uaddOvf(c, d)) { + break + } + v.reset(OpRsh32Ux64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = c + d + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh32Ux8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh32Ux8 <t> x (Const8 [c])) + // cond: + // result: (Rsh32Ux64 x (Const64 <t> [int64(uint8(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst8 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh32Ux64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint8(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh32x16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh32x16 
<t> x (Const16 [c])) + // cond: + // result: (Rsh32x64 x (Const64 <t> [int64(uint16(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst16 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh32x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint16(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh32x32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh32x32 <t> x (Const32 [c])) + // cond: + // result: (Rsh32x64 x (Const64 <t> [int64(uint32(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst32 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh32x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint32(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh32x64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh32x64 (Const32 [c]) (Const64 [d])) + // cond: + // result: (Const32 [int64(int32(c) >> uint64(d))]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst32) + v.AuxInt = int64(int32(c) >> uint64(d)) + return true + } + // match: (Rsh32x64 (Const32 [0]) _) + // cond: + // result: (Const32 [0]) + for { + if v.Args[0].Op != OpConst32 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst32) + v.AuxInt = 0 + return true + } + // match: (Rsh32x64 x (Const64 [0])) + // cond: + // result: x + for { + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + if v.Args[1].AuxInt != 0 { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Rsh32x64 <t> (Rsh32x64 x (Const64 [c])) (Const64 [d])) + // cond: !uaddOvf(c,d) + // result: (Rsh32x64 x (Const64 <t> [c+d])) + for { + t := v.Type + if v.Args[0].Op != OpRsh32x64 { + break + } + x := v.Args[0].Args[0] + if v.Args[0].Args[1].Op != 
OpConst64 { + break + } + c := v.Args[0].Args[1].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + if !(!uaddOvf(c, d)) { + break + } + v.reset(OpRsh32x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = c + d + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh32x8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh32x8 <t> x (Const8 [c])) + // cond: + // result: (Rsh32x64 x (Const64 <t> [int64(uint8(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst8 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh32x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint8(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh64Ux16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh64Ux16 <t> x (Const16 [c])) + // cond: + // result: (Rsh64Ux64 x (Const64 <t> [int64(uint16(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst16 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh64Ux64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint16(c)) + v.AddArg(v0) + return true + } + // match: (Rsh64Ux16 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } + return false +} +func rewriteValuegeneric_OpRsh64Ux32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh64Ux32 <t> x (Const32 [c])) + // cond: + // result: (Rsh64Ux64 x (Const64 <t> [int64(uint32(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst32 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh64Ux64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint32(c)) + v.AddArg(v0) + return true + } + // match: (Rsh64Ux32 (Const64 [0]) _) + // cond: + // 
result: (Const64 [0]) + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } + return false +} +func rewriteValuegeneric_OpRsh64Ux64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh64Ux64 (Const64 [c]) (Const64 [d])) + // cond: + // result: (Const64 [int64(uint64(c) >> uint64(d))]) + for { + if v.Args[0].Op != OpConst64 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst64) + v.AuxInt = int64(uint64(c) >> uint64(d)) + return true + } + // match: (Rsh64Ux64 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } + // match: (Rsh64Ux64 x (Const64 [0])) + // cond: + // result: x + for { + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + if v.Args[1].AuxInt != 0 { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Rsh64Ux64 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } + // match: (Rsh64Ux64 _ (Const64 [c])) + // cond: uint64(c) >= 64 + // result: (Const64 [0]) + for { + if v.Args[1].Op != OpConst64 { + break + } + c := v.Args[1].AuxInt + if !(uint64(c) >= 64) { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } + // match: (Rsh64Ux64 <t> (Rsh64Ux64 x (Const64 [c])) (Const64 [d])) + // cond: !uaddOvf(c,d) + // result: (Rsh64Ux64 x (Const64 <t> [c+d])) + for { + t := v.Type + if v.Args[0].Op != OpRsh64Ux64 { + break + } + x := v.Args[0].Args[0] + if v.Args[0].Args[1].Op != OpConst64 { + break + } + c := v.Args[0].Args[1].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + if !(!uaddOvf(c, 
d)) { + break + } + v.reset(OpRsh64Ux64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = c + d + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh64Ux8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh64Ux8 <t> x (Const8 [c])) + // cond: + // result: (Rsh64Ux64 x (Const64 <t> [int64(uint8(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst8 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh64Ux64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint8(c)) + v.AddArg(v0) + return true + } + // match: (Rsh64Ux8 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } + return false +} +func rewriteValuegeneric_OpRsh64x16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh64x16 <t> x (Const16 [c])) + // cond: + // result: (Rsh64x64 x (Const64 <t> [int64(uint16(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst16 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh64x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint16(c)) + v.AddArg(v0) + return true + } + // match: (Rsh64x16 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } + return false +} +func rewriteValuegeneric_OpRsh64x32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh64x32 <t> x (Const32 [c])) + // cond: + // result: (Rsh64x64 x (Const64 <t> [int64(uint32(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst32 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh64x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint32(c)) + 
v.AddArg(v0) + return true + } + // match: (Rsh64x32 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } + return false +} +func rewriteValuegeneric_OpRsh64x64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh64x64 (Const64 [c]) (Const64 [d])) + // cond: + // result: (Const64 [c >> uint64(d)]) + for { + if v.Args[0].Op != OpConst64 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst64) + v.AuxInt = c >> uint64(d) + return true + } + // match: (Rsh64x64 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } + // match: (Rsh64x64 x (Const64 [0])) + // cond: + // result: x + for { + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + if v.Args[1].AuxInt != 0 { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Rsh64x64 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } + // match: (Rsh64x64 <t> (Rsh64x64 x (Const64 [c])) (Const64 [d])) + // cond: !uaddOvf(c,d) + // result: (Rsh64x64 x (Const64 <t> [c+d])) + for { + t := v.Type + if v.Args[0].Op != OpRsh64x64 { + break + } + x := v.Args[0].Args[0] + if v.Args[0].Args[1].Op != OpConst64 { + break + } + c := v.Args[0].Args[1].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + if !(!uaddOvf(c, d)) { + break + } + v.reset(OpRsh64x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = c + d + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh64x8(v *Value, 
config *Config) bool { + b := v.Block + _ = b + // match: (Rsh64x8 <t> x (Const8 [c])) + // cond: + // result: (Rsh64x64 x (Const64 <t> [int64(uint8(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst8 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh64x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint8(c)) + v.AddArg(v0) + return true + } + // match: (Rsh64x8 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + if v.Args[0].Op != OpConst64 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } + return false +} +func rewriteValuegeneric_OpRsh8Ux16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh8Ux16 <t> x (Const16 [c])) + // cond: + // result: (Rsh8Ux64 x (Const64 <t> [int64(uint16(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst16 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh8Ux64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint16(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh8Ux32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh8Ux32 <t> x (Const32 [c])) + // cond: + // result: (Rsh8Ux64 x (Const64 <t> [int64(uint32(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst32 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh8Ux64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint32(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh8Ux64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh8Ux64 (Const8 [c]) (Const64 [d])) + // cond: + // result: (Const8 [int64(uint8(c) >> uint64(d))]) + for { + if v.Args[0].Op != OpConst8 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst8) + v.AuxInt = int64(uint8(c) >> 
uint64(d)) + return true + } + // match: (Rsh8Ux64 (Const8 [0]) _) + // cond: + // result: (Const8 [0]) + for { + if v.Args[0].Op != OpConst8 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst8) + v.AuxInt = 0 + return true + } + // match: (Rsh8Ux64 x (Const64 [0])) + // cond: + // result: x + for { + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + if v.Args[1].AuxInt != 0 { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Rsh8Ux64 _ (Const64 [c])) + // cond: uint64(c) >= 8 + // result: (Const8 [0]) + for { + if v.Args[1].Op != OpConst64 { + break + } + c := v.Args[1].AuxInt + if !(uint64(c) >= 8) { + break + } + v.reset(OpConst8) + v.AuxInt = 0 + return true + } + // match: (Rsh8Ux64 <t> (Rsh8Ux64 x (Const64 [c])) (Const64 [d])) + // cond: !uaddOvf(c,d) + // result: (Rsh8Ux64 x (Const64 <t> [c+d])) + for { + t := v.Type + if v.Args[0].Op != OpRsh8Ux64 { + break + } + x := v.Args[0].Args[0] + if v.Args[0].Args[1].Op != OpConst64 { + break + } + c := v.Args[0].Args[1].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + if !(!uaddOvf(c, d)) { + break + } + v.reset(OpRsh8Ux64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = c + d + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh8Ux8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh8Ux8 <t> x (Const8 [c])) + // cond: + // result: (Rsh8Ux64 x (Const64 <t> [int64(uint8(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst8 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh8Ux64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint8(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh8x16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh8x16 <t> x (Const16 [c])) + // cond: + // result: (Rsh8x64 x (Const64 <t> [int64(uint16(c))])) + 
for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst16 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh8x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint16(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh8x32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh8x32 <t> x (Const32 [c])) + // cond: + // result: (Rsh8x64 x (Const64 <t> [int64(uint32(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst32 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh8x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint32(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh8x64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh8x64 (Const8 [c]) (Const64 [d])) + // cond: + // result: (Const8 [int64(int8(c) >> uint64(d))]) + for { + if v.Args[0].Op != OpConst8 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst8) + v.AuxInt = int64(int8(c) >> uint64(d)) + return true + } + // match: (Rsh8x64 (Const8 [0]) _) + // cond: + // result: (Const8 [0]) + for { + if v.Args[0].Op != OpConst8 { + break + } + if v.Args[0].AuxInt != 0 { + break + } + v.reset(OpConst8) + v.AuxInt = 0 + return true + } + // match: (Rsh8x64 x (Const64 [0])) + // cond: + // result: x + for { + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + if v.Args[1].AuxInt != 0 { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (Rsh8x64 <t> (Rsh8x64 x (Const64 [c])) (Const64 [d])) + // cond: !uaddOvf(c,d) + // result: (Rsh8x64 x (Const64 <t> [c+d])) + for { + t := v.Type + if v.Args[0].Op != OpRsh8x64 { + break + } + x := v.Args[0].Args[0] + if v.Args[0].Args[1].Op != OpConst64 { + break + } + c := v.Args[0].Args[1].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := 
v.Args[1].AuxInt + if !(!uaddOvf(c, d)) { + break + } + v.reset(OpRsh8x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = c + d + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpRsh8x8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Rsh8x8 <t> x (Const8 [c])) + // cond: + // result: (Rsh8x64 x (Const64 <t> [int64(uint8(c))])) + for { + t := v.Type + x := v.Args[0] + if v.Args[1].Op != OpConst8 { + break + } + c := v.Args[1].AuxInt + v.reset(OpRsh8x64) + v.AddArg(x) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = int64(uint8(c)) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpSliceCap(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (SliceCap (SliceMake _ _ cap)) + // cond: + // result: cap + for { + if v.Args[0].Op != OpSliceMake { + break + } + cap := v.Args[0].Args[2] + v.reset(OpCopy) + v.Type = cap.Type + v.AddArg(cap) + return true + } + return false +} +func rewriteValuegeneric_OpSliceLen(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (SliceLen (SliceMake _ len _)) + // cond: + // result: len + for { + if v.Args[0].Op != OpSliceMake { + break + } + len := v.Args[0].Args[1] + v.reset(OpCopy) + v.Type = len.Type + v.AddArg(len) + return true + } + return false +} +func rewriteValuegeneric_OpSlicePtr(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (SlicePtr (SliceMake ptr _ _ )) + // cond: + // result: ptr + for { + if v.Args[0].Op != OpSliceMake { + break + } + ptr := v.Args[0].Args[0] + v.reset(OpCopy) + v.Type = ptr.Type + v.AddArg(ptr) + return true + } + return false +} +func rewriteValuegeneric_OpStore(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Store _ (StructMake0) mem) + // cond: + // result: mem + for { + if v.Args[1].Op != OpStructMake0 { + break + } + mem := v.Args[2] + v.reset(OpCopy) + v.Type = mem.Type + v.AddArg(mem) + return true + } + // match: (Store 
dst (StructMake1 <t> f0) mem) + // cond: + // result: (Store [t.FieldType(0).Size()] dst f0 mem) + for { + dst := v.Args[0] + if v.Args[1].Op != OpStructMake1 { + break + } + t := v.Args[1].Type + f0 := v.Args[1].Args[0] + mem := v.Args[2] + v.reset(OpStore) + v.AuxInt = t.FieldType(0).Size() + v.AddArg(dst) + v.AddArg(f0) + v.AddArg(mem) + return true + } + // match: (Store dst (StructMake2 <t> f0 f1) mem) + // cond: + // result: (Store [t.FieldType(1).Size()] (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] dst f0 mem)) + for { + dst := v.Args[0] + if v.Args[1].Op != OpStructMake2 { + break + } + t := v.Args[1].Type + f0 := v.Args[1].Args[0] + f1 := v.Args[1].Args[1] + mem := v.Args[2] + v.reset(OpStore) + v.AuxInt = t.FieldType(1).Size() + v0 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo()) + v0.AuxInt = t.FieldOff(1) + v0.AddArg(dst) + v.AddArg(v0) + v.AddArg(f1) + v1 := b.NewValue0(v.Line, OpStore, TypeMem) + v1.AuxInt = t.FieldType(0).Size() + v1.AddArg(dst) + v1.AddArg(f0) + v1.AddArg(mem) + v.AddArg(v1) + return true + } + // match: (Store dst (StructMake3 <t> f0 f1 f2) mem) + // cond: + // result: (Store [t.FieldType(2).Size()] (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store [t.FieldType(1).Size()] (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] dst f0 mem))) + for { + dst := v.Args[0] + if v.Args[1].Op != OpStructMake3 { + break + } + t := v.Args[1].Type + f0 := v.Args[1].Args[0] + f1 := v.Args[1].Args[1] + f2 := v.Args[1].Args[2] + mem := v.Args[2] + v.reset(OpStore) + v.AuxInt = t.FieldType(2).Size() + v0 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo()) + v0.AuxInt = t.FieldOff(2) + v0.AddArg(dst) + v.AddArg(v0) + v.AddArg(f2) + v1 := b.NewValue0(v.Line, OpStore, TypeMem) + v1.AuxInt = t.FieldType(1).Size() + v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo()) + v2.AuxInt = t.FieldOff(1) + v2.AddArg(dst) + v1.AddArg(v2) + v1.AddArg(f1) 
+ v3 := b.NewValue0(v.Line, OpStore, TypeMem) + v3.AuxInt = t.FieldType(0).Size() + v3.AddArg(dst) + v3.AddArg(f0) + v3.AddArg(mem) + v1.AddArg(v3) + v.AddArg(v1) + return true + } + // match: (Store dst (StructMake4 <t> f0 f1 f2 f3) mem) + // cond: + // result: (Store [t.FieldType(3).Size()] (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] dst) f3 (Store [t.FieldType(2).Size()] (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store [t.FieldType(1).Size()] (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] dst f0 mem)))) + for { + dst := v.Args[0] + if v.Args[1].Op != OpStructMake4 { + break + } + t := v.Args[1].Type + f0 := v.Args[1].Args[0] + f1 := v.Args[1].Args[1] + f2 := v.Args[1].Args[2] + f3 := v.Args[1].Args[3] + mem := v.Args[2] + v.reset(OpStore) + v.AuxInt = t.FieldType(3).Size() + v0 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(3).PtrTo()) + v0.AuxInt = t.FieldOff(3) + v0.AddArg(dst) + v.AddArg(v0) + v.AddArg(f3) + v1 := b.NewValue0(v.Line, OpStore, TypeMem) + v1.AuxInt = t.FieldType(2).Size() + v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo()) + v2.AuxInt = t.FieldOff(2) + v2.AddArg(dst) + v1.AddArg(v2) + v1.AddArg(f2) + v3 := b.NewValue0(v.Line, OpStore, TypeMem) + v3.AuxInt = t.FieldType(1).Size() + v4 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo()) + v4.AuxInt = t.FieldOff(1) + v4.AddArg(dst) + v3.AddArg(v4) + v3.AddArg(f1) + v5 := b.NewValue0(v.Line, OpStore, TypeMem) + v5.AuxInt = t.FieldType(0).Size() + v5.AddArg(dst) + v5.AddArg(f0) + v5.AddArg(mem) + v3.AddArg(v5) + v1.AddArg(v3) + v.AddArg(v1) + return true + } + // match: (Store [8] dst (ComplexMake real imag) mem) + // cond: + // result: (Store [4] (OffPtr <config.fe.TypeFloat32().PtrTo()> [4] dst) imag (Store [4] dst real mem)) + for { + if v.AuxInt != 8 { + break + } + dst := v.Args[0] + if v.Args[1].Op != OpComplexMake { + break + } + real := v.Args[1].Args[0] + imag := v.Args[1].Args[1] + mem := v.Args[2] + v.reset(OpStore) 
+ v.AuxInt = 4 + v0 := b.NewValue0(v.Line, OpOffPtr, config.fe.TypeFloat32().PtrTo()) + v0.AuxInt = 4 + v0.AddArg(dst) + v.AddArg(v0) + v.AddArg(imag) + v1 := b.NewValue0(v.Line, OpStore, TypeMem) + v1.AuxInt = 4 + v1.AddArg(dst) + v1.AddArg(real) + v1.AddArg(mem) + v.AddArg(v1) + return true + } + // match: (Store [16] dst (ComplexMake real imag) mem) + // cond: + // result: (Store [8] (OffPtr <config.fe.TypeFloat64().PtrTo()> [8] dst) imag (Store [8] dst real mem)) + for { + if v.AuxInt != 16 { + break + } + dst := v.Args[0] + if v.Args[1].Op != OpComplexMake { + break + } + real := v.Args[1].Args[0] + imag := v.Args[1].Args[1] + mem := v.Args[2] + v.reset(OpStore) + v.AuxInt = 8 + v0 := b.NewValue0(v.Line, OpOffPtr, config.fe.TypeFloat64().PtrTo()) + v0.AuxInt = 8 + v0.AddArg(dst) + v.AddArg(v0) + v.AddArg(imag) + v1 := b.NewValue0(v.Line, OpStore, TypeMem) + v1.AuxInt = 8 + v1.AddArg(dst) + v1.AddArg(real) + v1.AddArg(mem) + v.AddArg(v1) + return true + } + // match: (Store [2*config.PtrSize] dst (StringMake ptr len) mem) + // cond: + // result: (Store [config.PtrSize] (OffPtr <config.fe.TypeInt().PtrTo()> [config.PtrSize] dst) len (Store [config.PtrSize] dst ptr mem)) + for { + if v.AuxInt != 2*config.PtrSize { + break + } + dst := v.Args[0] + if v.Args[1].Op != OpStringMake { + break + } + ptr := v.Args[1].Args[0] + len := v.Args[1].Args[1] + mem := v.Args[2] + v.reset(OpStore) + v.AuxInt = config.PtrSize + v0 := b.NewValue0(v.Line, OpOffPtr, config.fe.TypeInt().PtrTo()) + v0.AuxInt = config.PtrSize + v0.AddArg(dst) + v.AddArg(v0) + v.AddArg(len) + v1 := b.NewValue0(v.Line, OpStore, TypeMem) + v1.AuxInt = config.PtrSize + v1.AddArg(dst) + v1.AddArg(ptr) + v1.AddArg(mem) + v.AddArg(v1) + return true + } + // match: (Store [3*config.PtrSize] dst (SliceMake ptr len cap) mem) + // cond: + // result: (Store [config.PtrSize] (OffPtr <config.fe.TypeInt().PtrTo()> [2*config.PtrSize] dst) cap (Store [config.PtrSize] (OffPtr <config.fe.TypeInt().PtrTo()> 
[config.PtrSize] dst) len (Store [config.PtrSize] dst ptr mem))) + for { + if v.AuxInt != 3*config.PtrSize { + break + } + dst := v.Args[0] + if v.Args[1].Op != OpSliceMake { + break + } + ptr := v.Args[1].Args[0] + len := v.Args[1].Args[1] + cap := v.Args[1].Args[2] + mem := v.Args[2] + v.reset(OpStore) + v.AuxInt = config.PtrSize + v0 := b.NewValue0(v.Line, OpOffPtr, config.fe.TypeInt().PtrTo()) + v0.AuxInt = 2 * config.PtrSize + v0.AddArg(dst) + v.AddArg(v0) + v.AddArg(cap) + v1 := b.NewValue0(v.Line, OpStore, TypeMem) + v1.AuxInt = config.PtrSize + v2 := b.NewValue0(v.Line, OpOffPtr, config.fe.TypeInt().PtrTo()) + v2.AuxInt = config.PtrSize + v2.AddArg(dst) + v1.AddArg(v2) + v1.AddArg(len) + v3 := b.NewValue0(v.Line, OpStore, TypeMem) + v3.AuxInt = config.PtrSize + v3.AddArg(dst) + v3.AddArg(ptr) + v3.AddArg(mem) + v1.AddArg(v3) + v.AddArg(v1) + return true + } + // match: (Store [2*config.PtrSize] dst (IMake itab data) mem) + // cond: + // result: (Store [config.PtrSize] (OffPtr <config.fe.TypeBytePtr().PtrTo()> [config.PtrSize] dst) data (Store [config.PtrSize] dst itab mem)) + for { + if v.AuxInt != 2*config.PtrSize { + break + } + dst := v.Args[0] + if v.Args[1].Op != OpIMake { + break + } + itab := v.Args[1].Args[0] + data := v.Args[1].Args[1] + mem := v.Args[2] + v.reset(OpStore) + v.AuxInt = config.PtrSize + v0 := b.NewValue0(v.Line, OpOffPtr, config.fe.TypeBytePtr().PtrTo()) + v0.AuxInt = config.PtrSize + v0.AddArg(dst) + v.AddArg(v0) + v.AddArg(data) + v1 := b.NewValue0(v.Line, OpStore, TypeMem) + v1.AuxInt = config.PtrSize + v1.AddArg(dst) + v1.AddArg(itab) + v1.AddArg(mem) + v.AddArg(v1) + return true + } + // match: (Store [size] dst (Load <t> src mem) mem) + // cond: !config.fe.CanSSA(t) + // result: (Move [size] dst src mem) + for { + size := v.AuxInt + dst := v.Args[0] + if v.Args[1].Op != OpLoad { + break + } + t := v.Args[1].Type + src := v.Args[1].Args[0] + mem := v.Args[1].Args[1] + if v.Args[2] != mem { + break + } + if 
!(!config.fe.CanSSA(t)) { + break + } + v.reset(OpMove) + v.AuxInt = size + v.AddArg(dst) + v.AddArg(src) + v.AddArg(mem) + return true + } + // match: (Store [size] dst (Load <t> src mem) (VarDef {x} mem)) + // cond: !config.fe.CanSSA(t) + // result: (Move [size] dst src (VarDef {x} mem)) + for { + size := v.AuxInt + dst := v.Args[0] + if v.Args[1].Op != OpLoad { + break + } + t := v.Args[1].Type + src := v.Args[1].Args[0] + mem := v.Args[1].Args[1] + if v.Args[2].Op != OpVarDef { + break + } + x := v.Args[2].Aux + if v.Args[2].Args[0] != mem { + break + } + if !(!config.fe.CanSSA(t)) { + break + } + v.reset(OpMove) + v.AuxInt = size + v.AddArg(dst) + v.AddArg(src) + v0 := b.NewValue0(v.Line, OpVarDef, TypeMem) + v0.Aux = x + v0.AddArg(mem) + v.AddArg(v0) + return true + } + return false +} +func rewriteValuegeneric_OpStringLen(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (StringLen (StringMake _ len)) + // cond: + // result: len + for { + if v.Args[0].Op != OpStringMake { + break + } + len := v.Args[0].Args[1] + v.reset(OpCopy) + v.Type = len.Type + v.AddArg(len) + return true + } + return false +} +func rewriteValuegeneric_OpStringPtr(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (StringPtr (StringMake ptr _)) + // cond: + // result: ptr + for { + if v.Args[0].Op != OpStringMake { + break + } + ptr := v.Args[0].Args[0] + v.reset(OpCopy) + v.Type = ptr.Type + v.AddArg(ptr) + return true + } + return false +} +func rewriteValuegeneric_OpStructSelect(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (StructSelect (StructMake1 x)) + // cond: + // result: x + for { + if v.Args[0].Op != OpStructMake1 { + break + } + x := v.Args[0].Args[0] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (StructSelect [0] (StructMake2 x _)) + // cond: + // result: x + for { + if v.AuxInt != 0 { + break + } + if v.Args[0].Op != OpStructMake2 { + break + } + x := v.Args[0].Args[0] + 
v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (StructSelect [1] (StructMake2 _ x)) + // cond: + // result: x + for { + if v.AuxInt != 1 { + break + } + if v.Args[0].Op != OpStructMake2 { + break + } + x := v.Args[0].Args[1] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (StructSelect [0] (StructMake3 x _ _)) + // cond: + // result: x + for { + if v.AuxInt != 0 { + break + } + if v.Args[0].Op != OpStructMake3 { + break + } + x := v.Args[0].Args[0] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (StructSelect [1] (StructMake3 _ x _)) + // cond: + // result: x + for { + if v.AuxInt != 1 { + break + } + if v.Args[0].Op != OpStructMake3 { + break + } + x := v.Args[0].Args[1] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (StructSelect [2] (StructMake3 _ _ x)) + // cond: + // result: x + for { + if v.AuxInt != 2 { + break + } + if v.Args[0].Op != OpStructMake3 { + break + } + x := v.Args[0].Args[2] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (StructSelect [0] (StructMake4 x _ _ _)) + // cond: + // result: x + for { + if v.AuxInt != 0 { + break + } + if v.Args[0].Op != OpStructMake4 { + break + } + x := v.Args[0].Args[0] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (StructSelect [1] (StructMake4 _ x _ _)) + // cond: + // result: x + for { + if v.AuxInt != 1 { + break + } + if v.Args[0].Op != OpStructMake4 { + break + } + x := v.Args[0].Args[1] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (StructSelect [2] (StructMake4 _ _ x _)) + // cond: + // result: x + for { + if v.AuxInt != 2 { + break + } + if v.Args[0].Op != OpStructMake4 { + break + } + x := v.Args[0].Args[2] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (StructSelect [3] (StructMake4 _ _ _ x)) + // cond: + // result: x + for { + if v.AuxInt != 3 { + 
break + } + if v.Args[0].Op != OpStructMake4 { + break + } + x := v.Args[0].Args[3] + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + // match: (StructSelect [i] (Load <t> ptr mem)) + // cond: !config.fe.CanSSA(t) + // result: @v.Args[0].Block (Load <v.Type> (OffPtr <v.Type.PtrTo()> [t.FieldOff(i)] ptr) mem) + for { + i := v.AuxInt + if v.Args[0].Op != OpLoad { + break + } + t := v.Args[0].Type + ptr := v.Args[0].Args[0] + mem := v.Args[0].Args[1] + if !(!config.fe.CanSSA(t)) { + break + } + b = v.Args[0].Block + v0 := b.NewValue0(v.Line, OpLoad, v.Type) + v.reset(OpCopy) + v.AddArg(v0) + v1 := b.NewValue0(v.Line, OpOffPtr, v.Type.PtrTo()) + v.reset(OpCopy) + v.AddArg(v1) + v1.AuxInt = t.FieldOff(i) + v1.AddArg(ptr) + v0.AddArg(v1) + v0.AddArg(mem) + return true + } + return false +} +func rewriteValuegeneric_OpSub16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Sub16 (Const16 [c]) (Const16 [d])) + // cond: + // result: (Const16 [c-d]) + for { + if v.Args[0].Op != OpConst16 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst16 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst16) + v.AuxInt = c - d + return true + } + // match: (Sub16 x (Const16 <t> [c])) + // cond: x.Op != OpConst16 + // result: (Add16 (Const16 <t> [-c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst16 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst16) { + break + } + v.reset(OpAdd16) + v0 := b.NewValue0(v.Line, OpConst16, t) + v0.AuxInt = -c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Sub16 x x) + // cond: + // result: (Const16 [0]) + for { + x := v.Args[0] + if v.Args[1] != x { + break + } + v.reset(OpConst16) + v.AuxInt = 0 + return true + } + // match: (Sub16 (Add16 x y) x) + // cond: + // result: y + for { + if v.Args[0].Op != OpAdd16 { + break + } + x := v.Args[0].Args[0] + y := v.Args[0].Args[1] + if v.Args[1] != x { + break + } + v.reset(OpCopy) + v.Type = y.Type + 
v.AddArg(y) + return true + } + // match: (Sub16 (Add16 x y) y) + // cond: + // result: x + for { + if v.Args[0].Op != OpAdd16 { + break + } + x := v.Args[0].Args[0] + y := v.Args[0].Args[1] + if v.Args[1] != y { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + return false +} +func rewriteValuegeneric_OpSub32(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Sub32 (Const32 [c]) (Const32 [d])) + // cond: + // result: (Const32 [c-d]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst32 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst32) + v.AuxInt = c - d + return true + } + // match: (Sub32 x (Const32 <t> [c])) + // cond: x.Op != OpConst32 + // result: (Add32 (Const32 <t> [-c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst32 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst32) { + break + } + v.reset(OpAdd32) + v0 := b.NewValue0(v.Line, OpConst32, t) + v0.AuxInt = -c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Sub32 x x) + // cond: + // result: (Const32 [0]) + for { + x := v.Args[0] + if v.Args[1] != x { + break + } + v.reset(OpConst32) + v.AuxInt = 0 + return true + } + // match: (Sub32 (Add32 x y) x) + // cond: + // result: y + for { + if v.Args[0].Op != OpAdd32 { + break + } + x := v.Args[0].Args[0] + y := v.Args[0].Args[1] + if v.Args[1] != x { + break + } + v.reset(OpCopy) + v.Type = y.Type + v.AddArg(y) + return true + } + // match: (Sub32 (Add32 x y) y) + // cond: + // result: x + for { + if v.Args[0].Op != OpAdd32 { + break + } + x := v.Args[0].Args[0] + y := v.Args[0].Args[1] + if v.Args[1] != y { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + return false +} +func rewriteValuegeneric_OpSub64(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Sub64 (Const64 [c]) (Const64 [d])) + // cond: + // result: (Const64 [c-d]) + for { + if 
v.Args[0].Op != OpConst64 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst64 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst64) + v.AuxInt = c - d + return true + } + // match: (Sub64 x (Const64 <t> [c])) + // cond: x.Op != OpConst64 + // result: (Add64 (Const64 <t> [-c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst64 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst64) { + break + } + v.reset(OpAdd64) + v0 := b.NewValue0(v.Line, OpConst64, t) + v0.AuxInt = -c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Sub64 x x) + // cond: + // result: (Const64 [0]) + for { + x := v.Args[0] + if v.Args[1] != x { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } + // match: (Sub64 (Add64 x y) x) + // cond: + // result: y + for { + if v.Args[0].Op != OpAdd64 { + break + } + x := v.Args[0].Args[0] + y := v.Args[0].Args[1] + if v.Args[1] != x { + break + } + v.reset(OpCopy) + v.Type = y.Type + v.AddArg(y) + return true + } + // match: (Sub64 (Add64 x y) y) + // cond: + // result: x + for { + if v.Args[0].Op != OpAdd64 { + break + } + x := v.Args[0].Args[0] + y := v.Args[0].Args[1] + if v.Args[1] != y { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + return false +} +func rewriteValuegeneric_OpSub8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Sub8 (Const8 [c]) (Const8 [d])) + // cond: + // result: (Const8 [c-d]) + for { + if v.Args[0].Op != OpConst8 { + break + } + c := v.Args[0].AuxInt + if v.Args[1].Op != OpConst8 { + break + } + d := v.Args[1].AuxInt + v.reset(OpConst8) + v.AuxInt = c - d + return true + } + // match: (Sub8 x (Const8 <t> [c])) + // cond: x.Op != OpConst8 + // result: (Add8 (Const8 <t> [-c]) x) + for { + x := v.Args[0] + if v.Args[1].Op != OpConst8 { + break + } + t := v.Args[1].Type + c := v.Args[1].AuxInt + if !(x.Op != OpConst8) { + break + } + v.reset(OpAdd8) + v0 := b.NewValue0(v.Line, OpConst8, t) + 
v0.AuxInt = -c + v.AddArg(v0) + v.AddArg(x) + return true + } + // match: (Sub8 x x) + // cond: + // result: (Const8 [0]) + for { + x := v.Args[0] + if v.Args[1] != x { + break + } + v.reset(OpConst8) + v.AuxInt = 0 + return true + } + // match: (Sub8 (Add8 x y) x) + // cond: + // result: y + for { + if v.Args[0].Op != OpAdd8 { + break + } + x := v.Args[0].Args[0] + y := v.Args[0].Args[1] + if v.Args[1] != x { + break + } + v.reset(OpCopy) + v.Type = y.Type + v.AddArg(y) + return true + } + // match: (Sub8 (Add8 x y) y) + // cond: + // result: x + for { + if v.Args[0].Op != OpAdd8 { + break + } + x := v.Args[0].Args[0] + y := v.Args[0].Args[1] + if v.Args[1] != y { + break + } + v.reset(OpCopy) + v.Type = x.Type + v.AddArg(x) + return true + } + return false +} +func rewriteValuegeneric_OpTrunc16to8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Trunc16to8 (Const16 [c])) + // cond: + // result: (Const8 [int64(int8(c))]) + for { + if v.Args[0].Op != OpConst16 { + break + } + c := v.Args[0].AuxInt + v.reset(OpConst8) + v.AuxInt = int64(int8(c)) + return true + } + return false +} +func rewriteValuegeneric_OpTrunc32to16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Trunc32to16 (Const32 [c])) + // cond: + // result: (Const16 [int64(int16(c))]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + v.reset(OpConst16) + v.AuxInt = int64(int16(c)) + return true + } + return false +} +func rewriteValuegeneric_OpTrunc32to8(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Trunc32to8 (Const32 [c])) + // cond: + // result: (Const8 [int64(int8(c))]) + for { + if v.Args[0].Op != OpConst32 { + break + } + c := v.Args[0].AuxInt + v.reset(OpConst8) + v.AuxInt = int64(int8(c)) + return true + } + return false +} +func rewriteValuegeneric_OpTrunc64to16(v *Value, config *Config) bool { + b := v.Block + _ = b + // match: (Trunc64to16 (Const64 [c])) + // cond: + // result: (Const16 
[int64(int16(c))])
	for {
		if v.Args[0].Op != OpConst64 {
			break
		}
		c := v.Args[0].AuxInt
		v.reset(OpConst16)
		v.AuxInt = int64(int16(c))
		return true
	}
	return false
}

// NOTE(review): this file is autogenerated from gen/generic.rules — change the
// rules and regenerate rather than editing these functions by hand.

// rewriteValuegeneric_OpTrunc64to32 constant-folds a 64->32 bit truncation:
// a Trunc64to32 of a Const64 becomes a Const32 holding the low 32 bits.
func rewriteValuegeneric_OpTrunc64to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc64to32 (Const64 [c]))
	// cond:
	// result: (Const32 [int64(int32(c))])
	for {
		if v.Args[0].Op != OpConst64 {
			break
		}
		c := v.Args[0].AuxInt
		v.reset(OpConst32)
		// int64(int32(c)) sign-extends the truncated value back into the
		// 64-bit AuxInt field, the canonical storage form for constants.
		v.AuxInt = int64(int32(c))
		return true
	}
	return false
}

// rewriteValuegeneric_OpTrunc64to8 constant-folds a 64->8 bit truncation:
// a Trunc64to8 of a Const64 becomes a Const8 holding the low 8 bits.
func rewriteValuegeneric_OpTrunc64to8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc64to8 (Const64 [c]))
	// cond:
	// result: (Const8 [int64(int8(c))])
	for {
		if v.Args[0].Op != OpConst64 {
			break
		}
		c := v.Args[0].AuxInt
		v.reset(OpConst8)
		v.AuxInt = int64(int8(c))
		return true
	}
	return false
}

// rewriteValuegeneric_OpXor16 applies the generic XOR rules for 16-bit values:
// move a constant operand to the left (canonical form), fold x^x to 0, and
// drop an XOR with zero.
func rewriteValuegeneric_OpXor16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor16 x (Const16 <t> [c]))
	// cond: x.Op != OpConst16
	// result: (Xor16 (Const16 <t> [c]) x)
	for {
		x := v.Args[0]
		if v.Args[1].Op != OpConst16 {
			break
		}
		t := v.Args[1].Type
		c := v.Args[1].AuxInt
		// The x.Op != OpConst16 guard prevents this canonicalization from
		// looping forever when both operands are constants.
		if !(x.Op != OpConst16) {
			break
		}
		v.reset(OpXor16)
		v0 := b.NewValue0(v.Line, OpConst16, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Xor16 x x)
	// cond:
	// result: (Const16 [0])
	for {
		x := v.Args[0]
		if v.Args[1] != x {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// match: (Xor16 (Const16 [0]) x)
	// cond:
	// result: x
	for {
		if v.Args[0].Op != OpConst16 {
			break
		}
		if v.Args[0].AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpXor32 applies the same three XOR rules for 32-bit
// values: constant-to-the-left canonicalization, x^x => 0, and 0^x => x.
func rewriteValuegeneric_OpXor32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor32 x (Const32 <t> [c]))
	// cond: x.Op != OpConst32
	// result: (Xor32 (Const32 <t> [c]) x)
	for {
		x := v.Args[0]
		if v.Args[1].Op != OpConst32 {
			break
		}
		t := v.Args[1].Type
		c := v.Args[1].AuxInt
		if !(x.Op != OpConst32) {
			break
		}
		v.reset(OpXor32)
		v0 := b.NewValue0(v.Line, OpConst32, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Xor32 x x)
	// cond:
	// result: (Const32 [0])
	for {
		x := v.Args[0]
		if v.Args[1] != x {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	// match: (Xor32 (Const32 [0]) x)
	// cond:
	// result: x
	for {
		if v.Args[0].Op != OpConst32 {
			break
		}
		if v.Args[0].AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpXor64 applies the same three XOR rules for 64-bit
// values: constant-to-the-left canonicalization, x^x => 0, and 0^x => x.
func rewriteValuegeneric_OpXor64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor64 x (Const64 <t> [c]))
	// cond: x.Op != OpConst64
	// result: (Xor64 (Const64 <t> [c]) x)
	for {
		x := v.Args[0]
		if v.Args[1].Op != OpConst64 {
			break
		}
		t := v.Args[1].Type
		c := v.Args[1].AuxInt
		if !(x.Op != OpConst64) {
			break
		}
		v.reset(OpXor64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Xor64 x x)
	// cond:
	// result: (Const64 [0])
	for {
		x := v.Args[0]
		if v.Args[1] != x {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// match: (Xor64 (Const64 [0]) x)
	// cond:
	// result: x
	for {
		if v.Args[0].Op != OpConst64 {
			break
		}
		if v.Args[0].AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpXor8 applies the same three XOR rules for 8-bit
// values: constant-to-the-left canonicalization, x^x => 0, and 0^x => x.
func rewriteValuegeneric_OpXor8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor8 x (Const8 <t> [c]))
	// cond: x.Op != OpConst8
	// result: (Xor8 (Const8 <t> [c]) x)
	for {
		x := v.Args[0]
		if v.Args[1].Op != OpConst8 {
			break
		}
		t := v.Args[1].Type
		c := v.Args[1].AuxInt
		if !(x.Op != OpConst8) {
			break
		}
		v.reset(OpXor8)
		v0 := b.NewValue0(v.Line, OpConst8, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Xor8 x x)
	// cond:
	// result: (Const8 [0])
	for {
		x := v.Args[0]
		if v.Args[1] != x {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	// match: (Xor8 (Const8 [0]) x)
	// cond:
	// result: x
	for {
		if v.Args[0].Op != OpConst8 {
			break
		}
		if v.Args[0].AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteBlockgeneric applies the generic rewrite rules for control-flow
// blocks: it removes nil checks of the g register, folds negated If
// conditions into swapped successors, and turns If blocks with constant
// boolean controls into unconditional First blocks. Reports whether a
// rewrite was applied.
func rewriteBlockgeneric(b *Block) bool {
	switch b.Kind {
	case BlockCheck:
		// match: (Check (NilCheck (GetG _) _) next)
		// cond:
		// result: (Plain nil next)
		for {
			v := b.Control
			if v.Op != OpNilCheck {
				break
			}
			// GetG fetches the current goroutine pointer, which is
			// presumably never nil here — TODO(review): confirm against
			// the rule's intent in gen/generic.rules.
			if v.Args[0].Op != OpGetG {
				break
			}
			next := b.Succs[0]
			b.Kind = BlockPlain
			b.Control = nil
			b.Succs[0] = next
			b.Likely = BranchUnknown
			return true
		}
	case BlockIf:
		// match: (If (Not cond) yes no)
		// cond:
		// result: (If cond no yes)
		for {
			v := b.Control
			if v.Op != OpNot {
				break
			}
			cond := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockIf
			b.Control = cond
			b.Succs[0] = no
			b.Succs[1] = yes
			// Swapping the successors inverts the branch-likelihood hint.
			b.Likely *= -1
			return true
		}
		// match: (If (ConstBool [c]) yes no)
		// cond: c == 1
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpConstBool {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c == 1) {
				break
			}
			b.Kind = BlockFirst
			b.Control = nil
			b.Succs[0] = yes
			b.Succs[1] = no
			return true
		}
		// match: (If (ConstBool [c]) yes no)
		// cond: c == 0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpConstBool {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c == 0) {
				break
			}
			b.Kind = BlockFirst
			b.Control = nil
			b.Succs[0] = no
			b.Succs[1] = yes
			b.Likely *= -1
			return true
		}
	}
	return false
}
