about summary refs log tree commit diff
path: root/src/cmd
diff options
context:
space:
mode:
author	Michael Pratt <mpratt@google.com>	2026-03-20 17:50:07 -0400
committer	Keith Randall <khr@golang.org>	2026-03-20 18:35:35 -0700
commit	5f5f4ccdb385fa73de5729cfe8c0336b44a88f4c (patch)
tree	712adbd438c58e8a483227ce52725a4b9ef902e0 /src/cmd
parent	16018b05ae226e7a99f166bded7f939c5b0c4a98 (diff)
download	go-5f5f4ccdb385fa73de5729cfe8c0336b44a88f4c.tar.xz
Revert "runtime, cmd/compile: use preemptible memclr for large pointer-free clears"
This reverts CL 750480.

Reason: Adding preemptible memclrNoHeapPointers exposes existing unsafe
use of notInHeapSlice, causing crashes. Revert the memclr stack until
the underlying issue is fixed.

We keep the test added in CL 755942, which is useful regardless.

For #78254.

Change-Id: I8be3f9a20292b7f294e98e74e5a86c6a204406ae
Reviewed-on: https://go-review.googlesource.com/c/go/+/757343
Reviewed-by: Keith Randall <khr@google.com>
LUCI-TryBot-Result: Go LUCI <golang-scoped@luci-project-accounts.iam.gserviceaccount.com>
Reviewed-by: Keith Randall <khr@golang.org>
Diffstat (limited to 'src/cmd')
-rw-r--r--src/cmd/compile/internal/ssa/_gen/generic.rules6
-rw-r--r--src/cmd/compile/internal/ssa/rewritegeneric.go8
-rw-r--r--src/cmd/compile/internal/typecheck/_builtin/runtime.go1
-rw-r--r--src/cmd/compile/internal/typecheck/builtin.go1
-rw-r--r--src/cmd/compile/internal/walk/assign.go2
-rw-r--r--src/cmd/compile/internal/walk/range.go4
6 files changed, 9 insertions, 13 deletions
diff --git a/src/cmd/compile/internal/ssa/_gen/generic.rules b/src/cmd/compile/internal/ssa/_gen/generic.rules
index 32d95a52d8..d18047810d 100644
--- a/src/cmd/compile/internal/ssa/_gen/generic.rules
+++ b/src/cmd/compile/internal/ssa/_gen/generic.rules
@@ -1605,13 +1605,11 @@
=> (AndB (MemEq p q (Const64 <typ.Int64> [16]) mem)
(MemEq (OffPtr <p.Type> p [16]) (OffPtr <q.Type> q [16]) (Const64 <typ.Int64> [c-16]) mem))
-// Turn known-size calls to memclrNoHeapPointers or memclrNoHeapPointersPreemptible into a Zero.
-// When the size is a known constant, inlining to OpZero is safe. Dynamic-size calls remain as
-// runtime calls and go through the chunked preemptible path (memclrNoHeapPointersPreemptible).
+// Turn known-size calls to memclrNoHeapPointers into a Zero.
// Note that we are using types.Types[types.TUINT8] instead of sptr.Type.Elem() - see issue 55122 and CL 431496 for more details.
(SelectN [0] call:(StaticCall {sym} sptr (Const(64|32) [c]) mem))
&& isInlinableMemclr(config, int64(c))
- && (isSameCall(sym, "runtime.memclrNoHeapPointers") || isSameCall(sym, "runtime.memclrNoHeapPointersPreemptible"))
+ && isSameCall(sym, "runtime.memclrNoHeapPointers")
&& call.Uses == 1
&& clobber(call)
=> (Zero {types.Types[types.TUINT8]} [int64(c)] sptr mem)
diff --git a/src/cmd/compile/internal/ssa/rewritegeneric.go b/src/cmd/compile/internal/ssa/rewritegeneric.go
index 122c4fe4cd..ad9dfd46d5 100644
--- a/src/cmd/compile/internal/ssa/rewritegeneric.go
+++ b/src/cmd/compile/internal/ssa/rewritegeneric.go
@@ -30094,7 +30094,7 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
return true
}
// match: (SelectN [0] call:(StaticCall {sym} sptr (Const64 [c]) mem))
- // cond: isInlinableMemclr(config, int64(c)) && (isSameCall(sym, "runtime.memclrNoHeapPointers") || isSameCall(sym, "runtime.memclrNoHeapPointersPreemptible")) && call.Uses == 1 && clobber(call)
+ // cond: isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)
// result: (Zero {types.Types[types.TUINT8]} [int64(c)] sptr mem)
for {
if auxIntToInt64(v.AuxInt) != 0 {
@@ -30112,7 +30112,7 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
break
}
c := auxIntToInt64(call_1.AuxInt)
- if !(isInlinableMemclr(config, int64(c)) && (isSameCall(sym, "runtime.memclrNoHeapPointers") || isSameCall(sym, "runtime.memclrNoHeapPointersPreemptible")) && call.Uses == 1 && clobber(call)) {
+ if !(isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)) {
break
}
v.reset(OpZero)
@@ -30122,7 +30122,7 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
return true
}
// match: (SelectN [0] call:(StaticCall {sym} sptr (Const32 [c]) mem))
- // cond: isInlinableMemclr(config, int64(c)) && (isSameCall(sym, "runtime.memclrNoHeapPointers") || isSameCall(sym, "runtime.memclrNoHeapPointersPreemptible")) && call.Uses == 1 && clobber(call)
+ // cond: isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)
// result: (Zero {types.Types[types.TUINT8]} [int64(c)] sptr mem)
for {
if auxIntToInt64(v.AuxInt) != 0 {
@@ -30140,7 +30140,7 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
break
}
c := auxIntToInt32(call_1.AuxInt)
- if !(isInlinableMemclr(config, int64(c)) && (isSameCall(sym, "runtime.memclrNoHeapPointers") || isSameCall(sym, "runtime.memclrNoHeapPointersPreemptible")) && call.Uses == 1 && clobber(call)) {
+ if !(isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)) {
break
}
v.reset(OpZero)
diff --git a/src/cmd/compile/internal/typecheck/_builtin/runtime.go b/src/cmd/compile/internal/typecheck/_builtin/runtime.go
index 5c8e585831..a1945ae1f4 100644
--- a/src/cmd/compile/internal/typecheck/_builtin/runtime.go
+++ b/src/cmd/compile/internal/typecheck/_builtin/runtime.go
@@ -213,7 +213,6 @@ func moveSliceNoCapNoScan(elemSize uintptr, old *byte, len int) (*byte, int, int
func memmove(to *any, frm *any, length uintptr)
func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)
-func memclrNoHeapPointersPreemptible(ptr unsafe.Pointer, n uintptr)
func memclrHasPointers(ptr unsafe.Pointer, n uintptr)
func memequal(x, y unsafe.Pointer, size uintptr) bool
diff --git a/src/cmd/compile/internal/typecheck/builtin.go b/src/cmd/compile/internal/typecheck/builtin.go
index b8422a177f..82d0ca26b8 100644
--- a/src/cmd/compile/internal/typecheck/builtin.go
+++ b/src/cmd/compile/internal/typecheck/builtin.go
@@ -176,7 +176,6 @@ var runtimeDecls = [...]struct {
{"moveSliceNoCapNoScan", funcTag, 134},
{"memmove", funcTag, 135},
{"memclrNoHeapPointers", funcTag, 136},
- {"memclrNoHeapPointersPreemptible", funcTag, 136},
{"memclrHasPointers", funcTag, 136},
{"memequal", funcTag, 137},
{"memequal0", funcTag, 138},
diff --git a/src/cmd/compile/internal/walk/assign.go b/src/cmd/compile/internal/walk/assign.go
index fe51a19cfc..f640cd83d1 100644
--- a/src/cmd/compile/internal/walk/assign.go
+++ b/src/cmd/compile/internal/walk/assign.go
@@ -718,7 +718,7 @@ func extendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
// hn := l2 * sizeof(elem(s))
hn := typecheck.Conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, l2, ir.NewInt(base.Pos, elemtype.Size())), types.Types[types.TUINTPTR])
- clrname := "memclrNoHeapPointersPreemptible"
+ clrname := "memclrNoHeapPointers"
hasPointers := elemtype.HasPointers()
if hasPointers {
clrname = "memclrHasPointers"
diff --git a/src/cmd/compile/internal/walk/range.go b/src/cmd/compile/internal/walk/range.go
index 10546f0849..4f239694ce 100644
--- a/src/cmd/compile/internal/walk/range.go
+++ b/src/cmd/compile/internal/walk/range.go
@@ -589,8 +589,8 @@ func arrayClear(wbPos src.XPos, a ir.Node, nrange *ir.RangeStmt) ir.Node {
ir.CurFunc.SetWBPos(wbPos)
fn = mkcallstmt("memclrHasPointers", hp, hn)
} else {
- // memclrNoHeapPointersPreemptible(hp, hn)
- fn = mkcallstmt("memclrNoHeapPointersPreemptible", hp, hn)
+ // memclrNoHeapPointers(hp, hn)
+ fn = mkcallstmt("memclrNoHeapPointers", hp, hn)
}
n.Body.Append(fn)