about summary refs log tree commit diff
path: root/src/cmd/compile/internal/ssa
diff options
context:
space:
mode:
author    Michael Pratt <mpratt@google.com>  2026-03-20 17:50:07 -0400
committer Keith Randall <khr@golang.org>     2026-03-20 18:35:35 -0700
commit    5f5f4ccdb385fa73de5729cfe8c0336b44a88f4c (patch)
tree      712adbd438c58e8a483227ce52725a4b9ef902e0 /src/cmd/compile/internal/ssa
parent    16018b05ae226e7a99f166bded7f939c5b0c4a98 (diff)
download  go-5f5f4ccdb385fa73de5729cfe8c0336b44a88f4c.tar.xz
Revert "runtime, cmd/compile: use preemptible memclr for large pointer-free clears"
This reverts CL 750480.

Reason: Adding preemptible memclrNoHeapPointers exposes existing unsafe
use of notInHeapSlice, causing crashes. Revert the memclr stack until
the underlying issue is fixed.

We keep the test added in CL 755942, which is useful regardless.

For #78254.

Change-Id: I8be3f9a20292b7f294e98e74e5a86c6a204406ae
Reviewed-on: https://go-review.googlesource.com/c/go/+/757343
Reviewed-by: Keith Randall <khr@google.com>
LUCI-TryBot-Result: Go LUCI <golang-scoped@luci-project-accounts.iam.gserviceaccount.com>
Reviewed-by: Keith Randall <khr@golang.org>
Diffstat (limited to 'src/cmd/compile/internal/ssa')
-rw-r--r--  src/cmd/compile/internal/ssa/_gen/generic.rules  6
-rw-r--r--  src/cmd/compile/internal/ssa/rewritegeneric.go   8
2 files changed, 6 insertions, 8 deletions
diff --git a/src/cmd/compile/internal/ssa/_gen/generic.rules b/src/cmd/compile/internal/ssa/_gen/generic.rules
index 32d95a52d8..d18047810d 100644
--- a/src/cmd/compile/internal/ssa/_gen/generic.rules
+++ b/src/cmd/compile/internal/ssa/_gen/generic.rules
@@ -1605,13 +1605,11 @@
=> (AndB (MemEq p q (Const64 <typ.Int64> [16]) mem)
(MemEq (OffPtr <p.Type> p [16]) (OffPtr <q.Type> q [16]) (Const64 <typ.Int64> [c-16]) mem))
-// Turn known-size calls to memclrNoHeapPointers or memclrNoHeapPointersPreemptible into a Zero.
-// When the size is a known constant, inlining to OpZero is safe. Dynamic-size calls remain as
-// runtime calls and go through the chunked preemptible path (memclrNoHeapPointersPreemptible).
+// Turn known-size calls to memclrNoHeapPointers into a Zero.
// Note that we are using types.Types[types.TUINT8] instead of sptr.Type.Elem() - see issue 55122 and CL 431496 for more details.
(SelectN [0] call:(StaticCall {sym} sptr (Const(64|32) [c]) mem))
&& isInlinableMemclr(config, int64(c))
- && (isSameCall(sym, "runtime.memclrNoHeapPointers") || isSameCall(sym, "runtime.memclrNoHeapPointersPreemptible"))
+ && isSameCall(sym, "runtime.memclrNoHeapPointers")
&& call.Uses == 1
&& clobber(call)
=> (Zero {types.Types[types.TUINT8]} [int64(c)] sptr mem)
diff --git a/src/cmd/compile/internal/ssa/rewritegeneric.go b/src/cmd/compile/internal/ssa/rewritegeneric.go
index 122c4fe4cd..ad9dfd46d5 100644
--- a/src/cmd/compile/internal/ssa/rewritegeneric.go
+++ b/src/cmd/compile/internal/ssa/rewritegeneric.go
@@ -30094,7 +30094,7 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
return true
}
// match: (SelectN [0] call:(StaticCall {sym} sptr (Const64 [c]) mem))
- // cond: isInlinableMemclr(config, int64(c)) && (isSameCall(sym, "runtime.memclrNoHeapPointers") || isSameCall(sym, "runtime.memclrNoHeapPointersPreemptible")) && call.Uses == 1 && clobber(call)
+ // cond: isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)
// result: (Zero {types.Types[types.TUINT8]} [int64(c)] sptr mem)
for {
if auxIntToInt64(v.AuxInt) != 0 {
@@ -30112,7 +30112,7 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
break
}
c := auxIntToInt64(call_1.AuxInt)
- if !(isInlinableMemclr(config, int64(c)) && (isSameCall(sym, "runtime.memclrNoHeapPointers") || isSameCall(sym, "runtime.memclrNoHeapPointersPreemptible")) && call.Uses == 1 && clobber(call)) {
+ if !(isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)) {
break
}
v.reset(OpZero)
@@ -30122,7 +30122,7 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
return true
}
// match: (SelectN [0] call:(StaticCall {sym} sptr (Const32 [c]) mem))
- // cond: isInlinableMemclr(config, int64(c)) && (isSameCall(sym, "runtime.memclrNoHeapPointers") || isSameCall(sym, "runtime.memclrNoHeapPointersPreemptible")) && call.Uses == 1 && clobber(call)
+ // cond: isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)
// result: (Zero {types.Types[types.TUINT8]} [int64(c)] sptr mem)
for {
if auxIntToInt64(v.AuxInt) != 0 {
@@ -30140,7 +30140,7 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
break
}
c := auxIntToInt32(call_1.AuxInt)
- if !(isInlinableMemclr(config, int64(c)) && (isSameCall(sym, "runtime.memclrNoHeapPointers") || isSameCall(sym, "runtime.memclrNoHeapPointersPreemptible")) && call.Uses == 1 && clobber(call)) {
+ if !(isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)) {
break
}
v.reset(OpZero)