about summary refs log tree commit diff
diff options
context:
space:
mode:
authorLynn Boger <laboger@linux.vnet.ibm.com>2021-10-05 10:22:47 -0500
committerLynn Boger <laboger@linux.vnet.ibm.com>2021-10-06 12:54:11 +0000
commitce72766a02a4be127a26e95fbd62c4b4bb906e91 (patch)
treead9f823d8fad9950ca58bee70326f2493486aba6
parent72c52bfbe2f72fbcacc865e18f132366bdd2effa (diff)
downloadgo-ce72766a02a4be127a26e95fbd62c4b4bb906e91.tar.xz
cmd/compile: improve PPC64 rules for AtomicLoad{8,32}
This adds a rule to avoid the zero extension after an AtomicLoad8 or AtomicLoad32, since the atomic load has already filled the register with zeros. This eliminates an instruction in a high-use block in findObject, and AtomicLoad8 appears many times within the runtime.

Change-Id: I7e684bf73d3812110bd371e05b1aa44fa235fc9b
Reviewed-on: https://go-review.googlesource.com/c/go/+/354029
Run-TryBot: Lynn Boger <laboger@linux.vnet.ibm.com>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Cherry Mui <cherryyz@google.com>
Trust: Lynn Boger <laboger@linux.vnet.ibm.com>
-rw-r--r--src/cmd/compile/internal/ssa/gen/PPC64.rules2
-rw-r--r--src/cmd/compile/internal/ssa/rewritePPC64.go28
2 files changed, 30 insertions, 0 deletions
diff --git a/src/cmd/compile/internal/ssa/gen/PPC64.rules b/src/cmd/compile/internal/ssa/gen/PPC64.rules
index 4c766df4b3..8e42bae215 100644
--- a/src/cmd/compile/internal/ssa/gen/PPC64.rules
+++ b/src/cmd/compile/internal/ssa/gen/PPC64.rules
@@ -1019,6 +1019,8 @@
(MOVWZreg x:(MOVWZloadidx _ _ _)) => x
(MOVWreg x:(MOVWload _ _)) => x
(MOVWreg x:(MOVWloadidx _ _ _)) => x
+(MOVBZreg x:(Select0 (LoweredAtomicLoad8 _ _))) => x
+(MOVWZreg x:(Select0 (LoweredAtomicLoad32 _ _))) => x
// don't extend if argument is already extended
(MOVBreg x:(Arg <t>)) && is8BitInt(t) && isSigned(t) => x
diff --git a/src/cmd/compile/internal/ssa/rewritePPC64.go b/src/cmd/compile/internal/ssa/rewritePPC64.go
index b278a4cb44..1e6624e906 100644
--- a/src/cmd/compile/internal/ssa/rewritePPC64.go
+++ b/src/cmd/compile/internal/ssa/rewritePPC64.go
@@ -7093,6 +7093,20 @@ func rewriteValuePPC64_OpPPC64MOVBZreg(v *Value) bool {
v.copyOf(x)
return true
}
+ // match: (MOVBZreg x:(Select0 (LoweredAtomicLoad8 _ _)))
+ // result: x
+ for {
+ x := v_0
+ if x.Op != OpSelect0 {
+ break
+ }
+ x_0 := x.Args[0]
+ if x_0.Op != OpPPC64LoweredAtomicLoad8 {
+ break
+ }
+ v.copyOf(x)
+ return true
+ }
// match: (MOVBZreg x:(Arg <t>))
// cond: is8BitInt(t) && !isSigned(t)
// result: x
@@ -10549,6 +10563,20 @@ func rewriteValuePPC64_OpPPC64MOVWZreg(v *Value) bool {
v.copyOf(x)
return true
}
+ // match: (MOVWZreg x:(Select0 (LoweredAtomicLoad32 _ _)))
+ // result: x
+ for {
+ x := v_0
+ if x.Op != OpSelect0 {
+ break
+ }
+ x_0 := x.Args[0]
+ if x_0.Op != OpPPC64LoweredAtomicLoad32 {
+ break
+ }
+ v.copyOf(x)
+ return true
+ }
// match: (MOVWZreg x:(Arg <t>))
// cond: (is8BitInt(t) || is16BitInt(t) || is32BitInt(t)) && !isSigned(t)
// result: x