about summary refs log tree commit diff
path: root/src/cmd
diff options
context:
space:
mode:
author: Timo Friedl <timofriedlberlin@gmail.com> 2026-04-10 21:59:28 +0000
committer: Gopher Robot <gobot@golang.org> 2026-04-13 03:42:16 -0700
commit: f42f2a3bb3028a5b826ca81822e5244ed839ac18 (patch)
tree: 557fcf7a29142a4dacc74953c77bbdd0f806481c /src/cmd
parent: 027a5bf27f8a826d83c52e21f8adf7ec845bc59d (diff)
download: go-f42f2a3bb3028a5b826ca81822e5244ed839ac18.tar.xz
cmd/compile: add boolean absorption laws to SSA rewrite rules
The SSA generic rewrite rules implement DeMorgan's laws but are missing the
closely related boolean absorption laws:

    x & (x | y) == x
    x | (x & y) == x

These are fundamental boolean algebra identities (see
https://en.wikipedia.org/wiki/Absorption_law) that hold for all bit
patterns, all widths, signed and unsigned. Both GCC and LLVM recognize and
optimize these patterns at -O2.

Add two generic rules covering all four widths (8, 16, 32, 64).
Commutativity of AND/OR is handled automatically by the rule engine, so all
argument orderings are matched.

The rules eliminate two redundant ALU instructions per occurrence and fire
on real code (defer bit-manipulation patterns in runtime, testing,
go/parser, and third-party packages).

Fixes #78632

Change-Id: Ib59e839081302ad1635e823309d8aec768c25dcf
GitHub-Last-Rev: 23f8296ece08c77fcaeeaf59c2c2d8ce23d1202c
GitHub-Pull-Request: golang/go#78634
Reviewed-on: https://go-review.googlesource.com/c/go/+/765580
Reviewed-by: Keith Randall <khr@google.com>
LUCI-TryBot-Result: Go LUCI <golang-scoped@luci-project-accounts.iam.gserviceaccount.com>
Reviewed-by: David Chase <drchase@google.com>
Reviewed-by: Keith Randall <khr@golang.org>
Auto-Submit: Keith Randall <khr@google.com>
Reviewed-by: Jorropo <jorropo.pgm@gmail.com>
Diffstat (limited to 'src/cmd')
-rw-r--r-- src/cmd/compile/internal/ssa/_gen/generic.rules | 4
-rw-r--r-- src/cmd/compile/internal/ssa/rewritegeneric.go | 168
2 files changed, 172 insertions, 0 deletions
diff --git a/src/cmd/compile/internal/ssa/_gen/generic.rules b/src/cmd/compile/internal/ssa/_gen/generic.rules
index 979d23adbc..6728189888 100644
--- a/src/cmd/compile/internal/ssa/_gen/generic.rules
+++ b/src/cmd/compile/internal/ssa/_gen/generic.rules
@@ -208,6 +208,10 @@
(And(8|16|32|64) <t> (Com(8|16|32|64) x) (Com(8|16|32|64) y)) => (Com(8|16|32|64) (Or(8|16|32|64) <t> x y))
(Or(8|16|32|64) <t> (Com(8|16|32|64) x) (Com(8|16|32|64) y)) => (Com(8|16|32|64) (And(8|16|32|64) <t> x y))
+// Absorption laws
+(And(8|16|32|64) x (Or(8|16|32|64) x y)) => x
+(Or(8|16|32|64) x (And(8|16|32|64) x y)) => x
+
(Mod8 (Const8 [c]) (Const8 [d])) && d != 0 => (Const8 [c % d])
(Mod16 (Const16 [c]) (Const16 [d])) && d != 0 => (Const16 [c % d])
(Mod32 (Const32 [c]) (Const32 [d])) && d != 0 => (Const32 [c % d])
diff --git a/src/cmd/compile/internal/ssa/rewritegeneric.go b/src/cmd/compile/internal/ssa/rewritegeneric.go
index 628264c886..1b3d6c17d2 100644
--- a/src/cmd/compile/internal/ssa/rewritegeneric.go
+++ b/src/cmd/compile/internal/ssa/rewritegeneric.go
@@ -3117,6 +3117,27 @@ func rewriteValuegeneric_OpAnd16(v *Value) bool {
}
break
}
+ // match: (And16 x (Or16 x y))
+ // result: x
+ for {
+ for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+ x := v_0
+ if v_1.Op != OpOr16 {
+ continue
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
+ if x != v_1_0 {
+ continue
+ }
+ v.copyOf(x)
+ return true
+ }
+ }
+ break
+ }
// match: (And16 (Const16 [m]) (Rsh16Ux64 _ (Const64 [c])))
// cond: c >= int64(16-ntz16(m))
// result: (Const16 [0])
@@ -3353,6 +3374,27 @@ func rewriteValuegeneric_OpAnd32(v *Value) bool {
}
break
}
+ // match: (And32 x (Or32 x y))
+ // result: x
+ for {
+ for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+ x := v_0
+ if v_1.Op != OpOr32 {
+ continue
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
+ if x != v_1_0 {
+ continue
+ }
+ v.copyOf(x)
+ return true
+ }
+ }
+ break
+ }
// match: (And32 (Const32 [m]) (Rsh32Ux64 _ (Const64 [c])))
// cond: c >= int64(32-ntz32(m))
// result: (Const32 [0])
@@ -3589,6 +3631,27 @@ func rewriteValuegeneric_OpAnd64(v *Value) bool {
}
break
}
+ // match: (And64 x (Or64 x y))
+ // result: x
+ for {
+ for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+ x := v_0
+ if v_1.Op != OpOr64 {
+ continue
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
+ if x != v_1_0 {
+ continue
+ }
+ v.copyOf(x)
+ return true
+ }
+ }
+ break
+ }
// match: (And64 (Const64 [m]) (Rsh64Ux64 _ (Const64 [c])))
// cond: c >= int64(64-ntz64(m))
// result: (Const64 [0])
@@ -3825,6 +3888,27 @@ func rewriteValuegeneric_OpAnd8(v *Value) bool {
}
break
}
+ // match: (And8 x (Or8 x y))
+ // result: x
+ for {
+ for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+ x := v_0
+ if v_1.Op != OpOr8 {
+ continue
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
+ if x != v_1_0 {
+ continue
+ }
+ v.copyOf(x)
+ return true
+ }
+ }
+ break
+ }
// match: (And8 (Const8 [m]) (Rsh8Ux64 _ (Const64 [c])))
// cond: c >= int64(8-ntz8(m))
// result: (Const8 [0])
@@ -21851,6 +21935,27 @@ func rewriteValuegeneric_OpOr16(v *Value) bool {
}
break
}
+ // match: (Or16 x (And16 x y))
+ // result: x
+ for {
+ for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+ x := v_0
+ if v_1.Op != OpAnd16 {
+ continue
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
+ if x != v_1_0 {
+ continue
+ }
+ v.copyOf(x)
+ return true
+ }
+ }
+ break
+ }
// match: (Or16 x x)
// result: x
for {
@@ -22376,6 +22481,27 @@ func rewriteValuegeneric_OpOr32(v *Value) bool {
}
break
}
+ // match: (Or32 x (And32 x y))
+ // result: x
+ for {
+ for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+ x := v_0
+ if v_1.Op != OpAnd32 {
+ continue
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
+ if x != v_1_0 {
+ continue
+ }
+ v.copyOf(x)
+ return true
+ }
+ }
+ break
+ }
// match: (Or32 x x)
// result: x
for {
@@ -22901,6 +23027,27 @@ func rewriteValuegeneric_OpOr64(v *Value) bool {
}
break
}
+ // match: (Or64 x (And64 x y))
+ // result: x
+ for {
+ for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+ x := v_0
+ if v_1.Op != OpAnd64 {
+ continue
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
+ if x != v_1_0 {
+ continue
+ }
+ v.copyOf(x)
+ return true
+ }
+ }
+ break
+ }
// match: (Or64 x x)
// result: x
for {
@@ -23426,6 +23573,27 @@ func rewriteValuegeneric_OpOr8(v *Value) bool {
}
break
}
+ // match: (Or8 x (And8 x y))
+ // result: x
+ for {
+ for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+ x := v_0
+ if v_1.Op != OpAnd8 {
+ continue
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
+ if x != v_1_0 {
+ continue
+ }
+ v.copyOf(x)
+ return true
+ }
+ }
+ break
+ }
// match: (Or8 x x)
// result: x
for {