about summary refs log tree commit diff
path: root/src/runtime/internal/atomic
diff options
context:
space:
mode:
authorCarlos Eduardo Seo <cseo@linux.vnet.ibm.com>2018-08-06 15:36:16 -0500
committerLynn Boger <laboger@linux.vnet.ibm.com>2018-10-23 18:10:38 +0000
commit5c472132bf88cc04c85ad5f848d8a2f77f21b228 (patch)
tree3409d087b15b0d8cc8181ed5a1e60249c1a46e0d /src/runtime/internal/atomic
parent5dbc67a99a99011fce1c1b7c983d2aa35247b950 (diff)
downloadgo-5c472132bf88cc04c85ad5f848d8a2f77f21b228.tar.xz
cmd/compile, runtime: add new lightweight atomics for ppc64x
This change creates the infrastructure for new lightweight atomics
primitives in runtime/internal/atomic:

- LoadAcq, for load-acquire
- StoreRel, for store-release
- CasRel, for Compare-and-Swap-release

and implements them for ppc64x. There is visible performance improvement
in producer-consumer scenarios, like BenchmarkChanProdCons*:

benchmark                         old ns/op  new ns/op  delta
BenchmarkChanProdCons0-48         2034       2034       +0.00%
BenchmarkChanProdCons10-48        1798       1608       -10.57%
BenchmarkChanProdCons100-48       1596       1585       -0.69%
BenchmarkChanProdConsWork0-48     2084       2046       -1.82%
BenchmarkChanProdConsWork10-48    1829       1668       -8.80%
BenchmarkChanProdConsWork100-48   1650       1650       +0.00%

Fixes #21348

Change-Id: I1f6ce377e4a0fe4bd7f5f775e8036f50070ad8db
Reviewed-on: https://go-review.googlesource.com/c/142277
Run-TryBot: Lynn Boger <laboger@linux.vnet.ibm.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: David Chase <drchase@google.com>
Diffstat (limited to 'src/runtime/internal/atomic')
-rw-r--r-- src/runtime/internal/atomic/asm_386.s         |  6
-rw-r--r-- src/runtime/internal/atomic/asm_amd64.s       |  6
-rw-r--r-- src/runtime/internal/atomic/asm_amd64p32.s    |  6
-rw-r--r-- src/runtime/internal/atomic/asm_arm.s         |  9
-rw-r--r-- src/runtime/internal/atomic/asm_arm64.s       |  3
-rw-r--r-- src/runtime/internal/atomic/asm_mips64x.s     |  6
-rw-r--r-- src/runtime/internal/atomic/asm_mipsx.s       |  6
-rw-r--r-- src/runtime/internal/atomic/asm_ppc64x.s      | 25
-rw-r--r-- src/runtime/internal/atomic/asm_s390x.s       |  4
-rw-r--r-- src/runtime/internal/atomic/atomic_386.go     | 12
-rw-r--r-- src/runtime/internal/atomic/atomic_amd64x.go  | 12
-rw-r--r-- src/runtime/internal/atomic/atomic_arm.go     |  9
-rw-r--r-- src/runtime/internal/atomic/atomic_arm64.go   |  9
-rw-r--r-- src/runtime/internal/atomic/atomic_arm64.s    |  7
-rw-r--r-- src/runtime/internal/atomic/atomic_mips64x.go |  9
-rw-r--r-- src/runtime/internal/atomic/atomic_mips64x.s  |  4
-rw-r--r-- src/runtime/internal/atomic/atomic_mipsx.go   |  9
-rw-r--r-- src/runtime/internal/atomic/atomic_ppc64x.go  |  9
-rw-r--r-- src/runtime/internal/atomic/atomic_ppc64x.s   |  9
-rw-r--r-- src/runtime/internal/atomic/atomic_s390x.go   | 15
-rw-r--r-- src/runtime/internal/atomic/atomic_wasm.go    | 22
21 files changed, 197 insertions, 0 deletions
diff --git a/src/runtime/internal/atomic/asm_386.s b/src/runtime/internal/atomic/asm_386.s
index 86a3ef33b9..13289a88d0 100644
--- a/src/runtime/internal/atomic/asm_386.s
+++ b/src/runtime/internal/atomic/asm_386.s
@@ -23,6 +23,9 @@ TEXT runtime∕internal∕atomic·Cas(SB), NOSPLIT, $0-13
TEXT runtime∕internal∕atomic·Casuintptr(SB), NOSPLIT, $0-13
JMP runtime∕internal∕atomic·Cas(SB)
+TEXT runtime∕internal∕atomic·CasRel(SB), NOSPLIT, $0-13
+ JMP runtime∕internal∕atomic·Cas(SB)
+
TEXT runtime∕internal∕atomic·Loaduintptr(SB), NOSPLIT, $0-8
JMP runtime∕internal∕atomic·Load(SB)
@@ -180,6 +183,9 @@ TEXT runtime∕internal∕atomic·Store(SB), NOSPLIT, $0-8
XCHGL AX, 0(BX)
RET
+TEXT runtime∕internal∕atomic·StoreRel(SB), NOSPLIT, $0-8
+ JMP runtime∕internal∕atomic·Store(SB)
+
// uint64 atomicload64(uint64 volatile* addr);
TEXT runtime∕internal∕atomic·Load64(SB), NOSPLIT, $0-12
MOVL ptr+0(FP), AX
diff --git a/src/runtime/internal/atomic/asm_amd64.s b/src/runtime/internal/atomic/asm_amd64.s
index 6fb5211c9c..e18aee7d59 100644
--- a/src/runtime/internal/atomic/asm_amd64.s
+++ b/src/runtime/internal/atomic/asm_amd64.s
@@ -43,6 +43,9 @@ TEXT runtime∕internal∕atomic·Cas64(SB), NOSPLIT, $0-25
TEXT runtime∕internal∕atomic·Casuintptr(SB), NOSPLIT, $0-25
JMP runtime∕internal∕atomic·Cas64(SB)
+TEXT runtime∕internal∕atomic·CasRel(SB), NOSPLIT, $0-17
+ JMP runtime∕internal∕atomic·Cas(SB)
+
TEXT runtime∕internal∕atomic·Loaduintptr(SB), NOSPLIT, $0-16
JMP runtime∕internal∕atomic·Load64(SB)
@@ -130,6 +133,9 @@ TEXT runtime∕internal∕atomic·Store(SB), NOSPLIT, $0-12
XCHGL AX, 0(BX)
RET
+TEXT runtime∕internal∕atomic·StoreRel(SB), NOSPLIT, $0-12
+ JMP runtime∕internal∕atomic·Store(SB)
+
TEXT runtime∕internal∕atomic·Store64(SB), NOSPLIT, $0-16
MOVQ ptr+0(FP), BX
MOVQ val+8(FP), AX
diff --git a/src/runtime/internal/atomic/asm_amd64p32.s b/src/runtime/internal/atomic/asm_amd64p32.s
index ff590e601b..35b5ef205e 100644
--- a/src/runtime/internal/atomic/asm_amd64p32.s
+++ b/src/runtime/internal/atomic/asm_amd64p32.s
@@ -23,6 +23,9 @@ TEXT runtime∕internal∕atomic·Cas(SB), NOSPLIT, $0-17
TEXT runtime∕internal∕atomic·Casuintptr(SB), NOSPLIT, $0-17
JMP runtime∕internal∕atomic·Cas(SB)
+TEXT runtime∕internal∕atomic·CasRel(SB), NOSPLIT, $0-17
+ JMP runtime∕internal∕atomic·Cas(SB)
+
TEXT runtime∕internal∕atomic·Loaduintptr(SB), NOSPLIT, $0-12
JMP runtime∕internal∕atomic·Load(SB)
@@ -130,6 +133,9 @@ TEXT runtime∕internal∕atomic·Store(SB), NOSPLIT, $0-8
XCHGL AX, 0(BX)
RET
+TEXT runtime∕internal∕atomic·StoreRel(SB), NOSPLIT, $0-8
+ JMP runtime∕internal∕atomic·Store(SB)
+
TEXT runtime∕internal∕atomic·Store64(SB), NOSPLIT, $0-16
MOVL ptr+0(FP), BX
MOVQ val+8(FP), AX
diff --git a/src/runtime/internal/atomic/asm_arm.s b/src/runtime/internal/atomic/asm_arm.s
index 09724c1c34..d4ef11560e 100644
--- a/src/runtime/internal/atomic/asm_arm.s
+++ b/src/runtime/internal/atomic/asm_arm.s
@@ -53,12 +53,18 @@ casfail:
TEXT runtime∕internal∕atomic·Loadp(SB),NOSPLIT|NOFRAME,$0-8
B runtime∕internal∕atomic·Load(SB)
+TEXT runtime∕internal∕atomic·LoadAcq(SB),NOSPLIT|NOFRAME,$0-8
+ B runtime∕internal∕atomic·Load(SB)
+
TEXT runtime∕internal∕atomic·Casuintptr(SB),NOSPLIT,$0-13
B runtime∕internal∕atomic·Cas(SB)
TEXT runtime∕internal∕atomic·Casp1(SB),NOSPLIT,$0-13
B runtime∕internal∕atomic·Cas(SB)
+TEXT runtime∕internal∕atomic·CasRel(SB),NOSPLIT,$0-13
+ B runtime∕internal∕atomic·Cas(SB)
+
TEXT runtime∕internal∕atomic·Loaduintptr(SB),NOSPLIT,$0-8
B runtime∕internal∕atomic·Load(SB)
@@ -71,6 +77,9 @@ TEXT runtime∕internal∕atomic·Storeuintptr(SB),NOSPLIT,$0-8
TEXT runtime∕internal∕atomic·StorepNoWB(SB),NOSPLIT,$0-8
B runtime∕internal∕atomic·Store(SB)
+TEXT runtime∕internal∕atomic·StoreRel(SB),NOSPLIT,$0-8
+ B runtime∕internal∕atomic·Store(SB)
+
TEXT runtime∕internal∕atomic·Xadduintptr(SB),NOSPLIT,$0-12
B runtime∕internal∕atomic·Xadd(SB)
diff --git a/src/runtime/internal/atomic/asm_arm64.s b/src/runtime/internal/atomic/asm_arm64.s
index 56b89a5a0b..8336a859ad 100644
--- a/src/runtime/internal/atomic/asm_arm64.s
+++ b/src/runtime/internal/atomic/asm_arm64.s
@@ -29,6 +29,9 @@ ok:
TEXT runtime∕internal∕atomic·Casuintptr(SB), NOSPLIT, $0-25
B runtime∕internal∕atomic·Cas64(SB)
+TEXT runtime∕internal∕atomic·CasRel(SB), NOSPLIT, $0-17
+ B runtime∕internal∕atomic·Cas(SB)
+
TEXT runtime∕internal∕atomic·Loaduintptr(SB), NOSPLIT, $0-16
B runtime∕internal∕atomic·Load64(SB)
diff --git a/src/runtime/internal/atomic/asm_mips64x.s b/src/runtime/internal/atomic/asm_mips64x.s
index 19d131e5a6..9cb10371b7 100644
--- a/src/runtime/internal/atomic/asm_mips64x.s
+++ b/src/runtime/internal/atomic/asm_mips64x.s
@@ -62,6 +62,9 @@ cas64_fail:
TEXT ·Casuintptr(SB), NOSPLIT, $0-25
JMP ·Cas64(SB)
+TEXT ·CasRel(SB), NOSPLIT, $0-17
+ JMP ·Cas(SB)
+
TEXT ·Loaduintptr(SB), NOSPLIT|NOFRAME, $0-16
JMP ·Load64(SB)
@@ -152,6 +155,9 @@ TEXT ·Xchguintptr(SB), NOSPLIT, $0-24
TEXT ·StorepNoWB(SB), NOSPLIT, $0-16
JMP ·Store64(SB)
+TEXT ·StoreRel(SB), NOSPLIT, $0-12
+ JMP ·Store(SB)
+
TEXT ·Store(SB), NOSPLIT, $0-12
MOVV ptr+0(FP), R1
MOVW val+8(FP), R2
diff --git a/src/runtime/internal/atomic/asm_mipsx.s b/src/runtime/internal/atomic/asm_mipsx.s
index 30550fd02e..73d7ea3ad4 100644
--- a/src/runtime/internal/atomic/asm_mipsx.s
+++ b/src/runtime/internal/atomic/asm_mipsx.s
@@ -70,6 +70,9 @@ try_xchg:
TEXT ·Casuintptr(SB),NOSPLIT,$0-13
JMP ·Cas(SB)
+TEXT ·CasRel(SB),NOSPLIT,$0-13
+ JMP ·Cas(SB)
+
TEXT ·Loaduintptr(SB),NOSPLIT,$0-8
JMP ·Load(SB)
@@ -100,6 +103,9 @@ TEXT ·Xchguintptr(SB),NOSPLIT,$0-12
TEXT ·StorepNoWB(SB),NOSPLIT,$0-8
JMP ·Store(SB)
+TEXT ·StoreRel(SB),NOSPLIT,$0-8
+ JMP ·Store(SB)
+
// void Or8(byte volatile*, byte);
TEXT ·Or8(SB),NOSPLIT,$0-5
MOVW ptr+0(FP), R1
diff --git a/src/runtime/internal/atomic/asm_ppc64x.s b/src/runtime/internal/atomic/asm_ppc64x.s
index a2ed4adc91..052b031cfb 100644
--- a/src/runtime/internal/atomic/asm_ppc64x.s
+++ b/src/runtime/internal/atomic/asm_ppc64x.s
@@ -59,6 +59,24 @@ cas64_fail:
MOVB R0, ret+24(FP)
RET
+TEXT runtime∕internal∕atomic·CasRel(SB), NOSPLIT, $0-17
+ MOVD ptr+0(FP), R3
+ MOVWZ old+8(FP), R4
+ MOVWZ new+12(FP), R5
+ LWSYNC
+cas_again:
+ LWAR (R3), $0, R6 // 0 = Mutex release hint
+ CMPW R6, R4
+ BNE cas_fail
+ STWCCC R5, (R3)
+ BNE cas_again
+ MOVD $1, R3
+ MOVB R3, ret+16(FP)
+ RET
+cas_fail:
+ MOVB R0, ret+16(FP)
+ RET
+
TEXT runtime∕internal∕atomic·Casuintptr(SB), NOSPLIT, $0-25
BR runtime∕internal∕atomic·Cas64(SB)
@@ -159,6 +177,13 @@ TEXT runtime∕internal∕atomic·Store64(SB), NOSPLIT, $0-16
MOVD R4, 0(R3)
RET
+TEXT runtime∕internal∕atomic·StoreRel(SB), NOSPLIT, $0-12
+ MOVD ptr+0(FP), R3
+ MOVW val+8(FP), R4
+ LWSYNC
+ MOVW R4, 0(R3)
+ RET
+
// void runtime∕internal∕atomic·Or8(byte volatile*, byte);
TEXT runtime∕internal∕atomic·Or8(SB), NOSPLIT, $0-9
MOVD ptr+0(FP), R3
diff --git a/src/runtime/internal/atomic/asm_s390x.s b/src/runtime/internal/atomic/asm_s390x.s
index e25703e077..512fde5a12 100644
--- a/src/runtime/internal/atomic/asm_s390x.s
+++ b/src/runtime/internal/atomic/asm_s390x.s
@@ -48,6 +48,10 @@ cas64_fail:
TEXT ·Casuintptr(SB), NOSPLIT, $0-25
BR ·Cas64(SB)
+// func CasRel(ptr *uint32, old, new uint32) bool
+TEXT ·CasRel(SB), NOSPLIT, $0-17
+ BR ·Cas(SB)
+
// func Loaduintptr(ptr *uintptr) uintptr
TEXT ·Loaduintptr(SB), NOSPLIT, $0-16
BR ·Load64(SB)
diff --git a/src/runtime/internal/atomic/atomic_386.go b/src/runtime/internal/atomic/atomic_386.go
index 4284d2bd7d..ad71ebd971 100644
--- a/src/runtime/internal/atomic/atomic_386.go
+++ b/src/runtime/internal/atomic/atomic_386.go
@@ -20,6 +20,12 @@ func Loadp(ptr unsafe.Pointer) unsafe.Pointer {
return *(*unsafe.Pointer)(ptr)
}
+//go:nosplit
+//go:noinline
+func LoadAcq(ptr *uint32) uint32 {
+ return *ptr
+}
+
//go:noescape
func Xadd64(ptr *uint64, delta int64) uint64
@@ -53,10 +59,16 @@ func Or8(ptr *uint8, val uint8)
func Cas64(ptr *uint64, old, new uint64) bool
//go:noescape
+func CasRel(ptr *uint32, old, new uint32) bool
+
+//go:noescape
func Store(ptr *uint32, val uint32)
//go:noescape
func Store64(ptr *uint64, val uint64)
+//go:noescape
+func StoreRel(ptr *uint32, val uint32)
+
// NO go:noescape annotation; see atomic_pointer.go.
func StorepNoWB(ptr unsafe.Pointer, val unsafe.Pointer)
diff --git a/src/runtime/internal/atomic/atomic_amd64x.go b/src/runtime/internal/atomic/atomic_amd64x.go
index 54851d30f4..d4fe461609 100644
--- a/src/runtime/internal/atomic/atomic_amd64x.go
+++ b/src/runtime/internal/atomic/atomic_amd64x.go
@@ -26,6 +26,12 @@ func Load64(ptr *uint64) uint64 {
return *ptr
}
+//go:nosplit
+//go:noinline
+func LoadAcq(ptr *uint32) uint32 {
+ return *ptr
+}
+
//go:noescape
func Xadd(ptr *uint32, delta int32) uint32
@@ -56,11 +62,17 @@ func Or8(ptr *uint8, val uint8)
func Cas64(ptr *uint64, old, new uint64) bool
//go:noescape
+func CasRel(ptr *uint32, old, new uint32) bool
+
+//go:noescape
func Store(ptr *uint32, val uint32)
//go:noescape
func Store64(ptr *uint64, val uint64)
+//go:noescape
+func StoreRel(ptr *uint32, val uint32)
+
// StorepNoWB performs *ptr = val atomically and without a write
// barrier.
//
diff --git a/src/runtime/internal/atomic/atomic_arm.go b/src/runtime/internal/atomic/atomic_arm.go
index 1ecdb11db9..51b42ba238 100644
--- a/src/runtime/internal/atomic/atomic_arm.go
+++ b/src/runtime/internal/atomic/atomic_arm.go
@@ -74,6 +74,9 @@ func StorepNoWB(addr unsafe.Pointer, v unsafe.Pointer)
//go:noescape
func Store(addr *uint32, v uint32)
+//go:noescape
+func StoreRel(addr *uint32, v uint32)
+
//go:nosplit
func goCas64(addr *uint64, old, new uint64) bool {
if uintptr(unsafe.Pointer(addr))&7 != 0 {
@@ -182,9 +185,15 @@ func Load(addr *uint32) uint32
func Loadp(addr unsafe.Pointer) unsafe.Pointer
//go:noescape
+func LoadAcq(addr *uint32) uint32
+
+//go:noescape
func Cas64(addr *uint64, old, new uint64) bool
//go:noescape
+func CasRel(addr *uint32, old, new uint32) bool
+
+//go:noescape
func Xadd64(addr *uint64, delta int64) uint64
//go:noescape
diff --git a/src/runtime/internal/atomic/atomic_arm64.go b/src/runtime/internal/atomic/atomic_arm64.go
index 3554b7f236..a2da27e7ed 100644
--- a/src/runtime/internal/atomic/atomic_arm64.go
+++ b/src/runtime/internal/atomic/atomic_arm64.go
@@ -36,6 +36,9 @@ func Load64(ptr *uint64) uint64
func Loadp(ptr unsafe.Pointer) unsafe.Pointer
//go:noescape
+func LoadAcq(addr *uint32) uint32
+
+//go:noescape
func Or8(ptr *uint8, val uint8)
//go:noescape
@@ -45,6 +48,9 @@ func And8(ptr *uint8, val uint8)
func Cas64(ptr *uint64, old, new uint64) bool
//go:noescape
+func CasRel(ptr *uint32, old, new uint32) bool
+
+//go:noescape
func Store(ptr *uint32, val uint32)
//go:noescape
@@ -52,3 +58,6 @@ func Store64(ptr *uint64, val uint64)
// NO go:noescape annotation; see atomic_pointer.go.
func StorepNoWB(ptr unsafe.Pointer, val unsafe.Pointer)
+
+//go:noescape
+func StoreRel(ptr *uint32, val uint32)
diff --git a/src/runtime/internal/atomic/atomic_arm64.s b/src/runtime/internal/atomic/atomic_arm64.s
index 354fd1e94b..c979f2246f 100644
--- a/src/runtime/internal/atomic/atomic_arm64.s
+++ b/src/runtime/internal/atomic/atomic_arm64.s
@@ -25,9 +25,16 @@ TEXT ·Loadp(SB),NOSPLIT,$0-16
MOVD R0, ret+8(FP)
RET
+// uint32 runtime∕internal∕atomic·LoadAcq(uint32 volatile* addr)
+TEXT ·LoadAcq(SB),NOSPLIT,$0-12
+ B ·Load(SB)
+
TEXT runtime∕internal∕atomic·StorepNoWB(SB), NOSPLIT, $0-16
B runtime∕internal∕atomic·Store64(SB)
+TEXT runtime∕internal∕atomic·StoreRel(SB), NOSPLIT, $0-12
+ B runtime∕internal∕atomic·Store(SB)
+
TEXT runtime∕internal∕atomic·Store(SB), NOSPLIT, $0-12
MOVD ptr+0(FP), R0
MOVW val+8(FP), R1
diff --git a/src/runtime/internal/atomic/atomic_mips64x.go b/src/runtime/internal/atomic/atomic_mips64x.go
index d06ea4809a..98a8fca929 100644
--- a/src/runtime/internal/atomic/atomic_mips64x.go
+++ b/src/runtime/internal/atomic/atomic_mips64x.go
@@ -36,6 +36,9 @@ func Load64(ptr *uint64) uint64
func Loadp(ptr unsafe.Pointer) unsafe.Pointer
//go:noescape
+func LoadAcq(ptr *uint32) uint32
+
+//go:noescape
func And8(ptr *uint8, val uint8)
//go:noescape
@@ -47,6 +50,9 @@ func Or8(ptr *uint8, val uint8)
func Cas64(ptr *uint64, old, new uint64) bool
//go:noescape
+func CasRel(ptr *uint32, old, new uint32) bool
+
+//go:noescape
func Store(ptr *uint32, val uint32)
//go:noescape
@@ -54,3 +60,6 @@ func Store64(ptr *uint64, val uint64)
// NO go:noescape annotation; see atomic_pointer.go.
func StorepNoWB(ptr unsafe.Pointer, val unsafe.Pointer)
+
+//go:noescape
+func StoreRel(ptr *uint32, val uint32)
diff --git a/src/runtime/internal/atomic/atomic_mips64x.s b/src/runtime/internal/atomic/atomic_mips64x.s
index 087672f5cc..5214afe2d6 100644
--- a/src/runtime/internal/atomic/atomic_mips64x.s
+++ b/src/runtime/internal/atomic/atomic_mips64x.s
@@ -34,3 +34,7 @@ TEXT ·Loadp(SB),NOSPLIT|NOFRAME,$0-16
SYNC
MOVV R1, ret+8(FP)
RET
+
+// uint32 runtime∕internal∕atomic·LoadAcq(uint32 volatile* ptr)
+TEXT ·LoadAcq(SB),NOSPLIT|NOFRAME,$0-12
+ JMP atomic·Load(SB)
diff --git a/src/runtime/internal/atomic/atomic_mipsx.go b/src/runtime/internal/atomic/atomic_mipsx.go
index 55943f6925..1cd6d9a9ce 100644
--- a/src/runtime/internal/atomic/atomic_mipsx.go
+++ b/src/runtime/internal/atomic/atomic_mipsx.go
@@ -120,6 +120,9 @@ func Load(ptr *uint32) uint32
func Loadp(ptr unsafe.Pointer) unsafe.Pointer
//go:noescape
+func LoadAcq(ptr *uint32) uint32
+
+//go:noescape
func And8(ptr *uint8, val uint8)
//go:noescape
@@ -130,3 +133,9 @@ func Store(ptr *uint32, val uint32)
// NO go:noescape annotation; see atomic_pointer.go.
func StorepNoWB(ptr unsafe.Pointer, val unsafe.Pointer)
+
+//go:noescape
+func StoreRel(ptr *uint32, val uint32)
+
+//go:noescape
+func CasRel(addr *uint32, old, new uint32) bool
diff --git a/src/runtime/internal/atomic/atomic_ppc64x.go b/src/runtime/internal/atomic/atomic_ppc64x.go
index 72c98eb0c5..4f1a95c5bd 100644
--- a/src/runtime/internal/atomic/atomic_ppc64x.go
+++ b/src/runtime/internal/atomic/atomic_ppc64x.go
@@ -36,6 +36,9 @@ func Load64(ptr *uint64) uint64
func Loadp(ptr unsafe.Pointer) unsafe.Pointer
//go:noescape
+func LoadAcq(ptr *uint32) uint32
+
+//go:noescape
func And8(ptr *uint8, val uint8)
//go:noescape
@@ -47,10 +50,16 @@ func Or8(ptr *uint8, val uint8)
func Cas64(ptr *uint64, old, new uint64) bool
//go:noescape
+func CasRel(ptr *uint32, old, new uint32) bool
+
+//go:noescape
func Store(ptr *uint32, val uint32)
//go:noescape
func Store64(ptr *uint64, val uint64)
+//go:noescape
+func StoreRel(ptr *uint32, val uint32)
+
// NO go:noescape annotation; see atomic_pointer.go.
func StorepNoWB(ptr unsafe.Pointer, val unsafe.Pointer)
diff --git a/src/runtime/internal/atomic/atomic_ppc64x.s b/src/runtime/internal/atomic/atomic_ppc64x.s
index c9c2d1fc0c..c079ea494f 100644
--- a/src/runtime/internal/atomic/atomic_ppc64x.s
+++ b/src/runtime/internal/atomic/atomic_ppc64x.s
@@ -38,3 +38,12 @@ TEXT ·Loadp(SB),NOSPLIT|NOFRAME,$-8-16
ISYNC
MOVD R3, ret+8(FP)
RET
+
+// uint32 runtime∕internal∕atomic·LoadAcq(uint32 volatile* ptr)
+TEXT ·LoadAcq(SB),NOSPLIT|NOFRAME,$-8-12
+ MOVD ptr+0(FP), R3
+ MOVWZ 0(R3), R3
+ CMPW R3, R3, CR7
+ BC 4, 30, 1(PC) // bne- cr7, 0x4
+ MOVW R3, ret+8(FP)
+ RET
diff --git a/src/runtime/internal/atomic/atomic_s390x.go b/src/runtime/internal/atomic/atomic_s390x.go
index 9343853485..ec294a27ba 100644
--- a/src/runtime/internal/atomic/atomic_s390x.go
+++ b/src/runtime/internal/atomic/atomic_s390x.go
@@ -24,6 +24,12 @@ func Load64(ptr *uint64) uint64 {
return *ptr
}
+//go:nosplit
+//go:noinline
+func LoadAcq(ptr *uint32) uint32 {
+ return *ptr
+}
+
//go:noinline
//go:nosplit
func Store(ptr *uint32, val uint32) {
@@ -43,6 +49,12 @@ func StorepNoWB(ptr unsafe.Pointer, val unsafe.Pointer) {
*(*uintptr)(ptr) = uintptr(val)
}
+//go:noinline
+//go:nosplit
+func StoreRel(ptr *uint32, val uint32) {
+ *ptr = val
+}
+
//go:noescape
func And8(ptr *uint8, val uint8)
@@ -71,3 +83,6 @@ func Xchguintptr(ptr *uintptr, new uintptr) uintptr
//go:noescape
func Cas64(ptr *uint64, old, new uint64) bool
+
+//go:noescape
+func CasRel(ptr *uint32, old, new uint32) bool
diff --git a/src/runtime/internal/atomic/atomic_wasm.go b/src/runtime/internal/atomic/atomic_wasm.go
index cbf254fcb5..71288e9003 100644
--- a/src/runtime/internal/atomic/atomic_wasm.go
+++ b/src/runtime/internal/atomic/atomic_wasm.go
@@ -23,6 +23,12 @@ func Loadp(ptr unsafe.Pointer) unsafe.Pointer {
//go:nosplit
//go:noinline
+func LoadAcq(ptr *uint32) uint32 {
+ return *ptr
+}
+
+//go:nosplit
+//go:noinline
func Load64(ptr *uint64) uint64 {
return *ptr
}
@@ -107,6 +113,12 @@ func Store(ptr *uint32, val uint32) {
//go:nosplit
//go:noinline
+func StoreRel(ptr *uint32, val uint32) {
+ *ptr = val
+}
+
+//go:nosplit
+//go:noinline
func Store64(ptr *uint64, val uint64) {
*ptr = val
}
@@ -149,6 +161,16 @@ func Casuintptr(ptr *uintptr, old, new uintptr) bool {
//go:nosplit
//go:noinline
+func CasRel(ptr *uint32, old, new uint32) bool {
+ if *ptr == old {
+ *ptr = new
+ return true
+ }
+ return false
+}
+
+//go:nosplit
+//go:noinline
func Storeuintptr(ptr *uintptr, new uintptr) {
*ptr = new
}