diff options
| author | Mauri de Souza Meneguzzo <mauri870@gmail.com> | 2024-05-16 22:12:47 +0000 |
|---|---|---|
| committer | Gopher Robot <gobot@golang.org> | 2024-05-17 18:37:29 +0000 |
| commit | 192d65e46b38381653ccbe16cac49f7fa36aac93 (patch) | |
| tree | 6dc7232d4cef6973a185f3e7fbddea5dfeae439f /src/sync/atomic | |
| parent | 664088b898330a2c73121c5a6f2d4dbec5b180d7 (diff) | |
| download | go-192d65e46b38381653ccbe16cac49f7fa36aac93.tar.xz | |
sync/atomic: public And/Or ops and race instrumentation
This CL implements the new sync/atomic AND and OR APIs as well as their race
counterparts.
Fixes #61395
Change-Id: I294eefe4b3ac27bc4ed237edcbfa88a8c646d86f
GitHub-Last-Rev: f174297007c7b81b1ff4a687ef23d955a3ffd4db
GitHub-Pull-Request: golang/go#64331
Reviewed-on: https://go-review.googlesource.com/c/go/+/544455
Reviewed-by: Keith Randall <khr@golang.org>
Reviewed-by: Austin Clements <austin@google.com>
Auto-Submit: Austin Clements <austin@google.com>
LUCI-TryBot-Result: Go LUCI <golang-scoped@luci-project-accounts.iam.gserviceaccount.com>
Reviewed-by: Keith Randall <khr@google.com>
Diffstat (limited to 'src/sync/atomic')
| -rw-r--r-- | src/sync/atomic/asm.s | 30 | ||||
| -rw-r--r-- | src/sync/atomic/atomic_test.go | 466 | ||||
| -rw-r--r-- | src/sync/atomic/doc.go | 50 | ||||
| -rw-r--r-- | src/sync/atomic/type.go | 40 |
4 files changed, 586 insertions, 0 deletions
diff --git a/src/sync/atomic/asm.s b/src/sync/atomic/asm.s index b9318fe8b7..c46869ede7 100644 --- a/src/sync/atomic/asm.s +++ b/src/sync/atomic/asm.s @@ -83,3 +83,33 @@ TEXT ·StoreUint64(SB),NOSPLIT,$0 TEXT ·StoreUintptr(SB),NOSPLIT,$0 JMP internal∕runtime∕atomic·Storeuintptr(SB) + +TEXT ·AndInt32(SB),NOSPLIT,$0 + JMP internal∕runtime∕atomic·And32(SB) + +TEXT ·AndUint32(SB),NOSPLIT,$0 + JMP internal∕runtime∕atomic·And32(SB) + +TEXT ·AndUintptr(SB),NOSPLIT,$0 + JMP internal∕runtime∕atomic·Anduintptr(SB) + +TEXT ·AndInt64(SB),NOSPLIT,$0 + JMP internal∕runtime∕atomic·And64(SB) + +TEXT ·AndUint64(SB),NOSPLIT,$0 + JMP internal∕runtime∕atomic·And64(SB) + +TEXT ·OrInt32(SB),NOSPLIT,$0 + JMP internal∕runtime∕atomic·Or32(SB) + +TEXT ·OrUint32(SB),NOSPLIT,$0 + JMP internal∕runtime∕atomic·Or32(SB) + +TEXT ·OrUintptr(SB),NOSPLIT,$0 + JMP internal∕runtime∕atomic·Oruintptr(SB) + +TEXT ·OrInt64(SB),NOSPLIT,$0 + JMP internal∕runtime∕atomic·Or64(SB) + +TEXT ·OrUint64(SB),NOSPLIT,$0 + JMP internal∕runtime∕atomic·Or64(SB) diff --git a/src/sync/atomic/atomic_test.go b/src/sync/atomic/atomic_test.go index c3604ef0af..0617b27aca 100644 --- a/src/sync/atomic/atomic_test.go +++ b/src/sync/atomic/atomic_test.go @@ -531,6 +531,472 @@ func TestAddUintptrMethod(t *testing.T) { } } +func TestAndInt32(t *testing.T) { + var x struct { + before int32 + i int32 + after int32 + } + x.before = magic32 + x.after = magic32 + x.i = -1 + j := x.i + for mask := int32(1); mask != 0; mask <<= 1 { + old := x.i + k := AndInt32(&x.i, ^mask) + j &= ^mask + if x.i != j || k != old { + t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i, j, k, old) + } + } + if x.before != magic32 || x.after != magic32 { + t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32) + } +} + +func TestAndInt32Method(t *testing.T) { + var x struct { + before int32 + i Int32 + after int32 + } + x.before = magic32 + x.after = magic32 + x.i.Store(-1) + j := x.i.Load() + for mask := int32(1); mask != 0; mask 
<<= 1 { + old := x.i.Load() + k := x.i.And(^mask) + j &= ^mask + if x.i.Load() != j || k != old { + t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i.Load(), j, k, old) + } + } + if x.before != magic32 || x.after != magic32 { + t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32) + } +} + +func TestAndUint32(t *testing.T) { + var x struct { + before uint32 + i uint32 + after uint32 + } + x.before = magic32 + x.after = magic32 + x.i = 0xffffffff + j := x.i + for mask := uint32(1); mask != 0; mask <<= 1 { + old := x.i + k := AndUint32(&x.i, ^mask) + j &= ^mask + if x.i != j || k != old { + t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i, j, k, old) + } + } + if x.before != magic32 || x.after != magic32 { + t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32) + } +} + +func TestAndUint32Method(t *testing.T) { + var x struct { + before uint32 + i Uint32 + after uint32 + } + x.before = magic32 + x.after = magic32 + x.i.Store(0xffffffff) + j := x.i.Load() + for mask := uint32(1); mask != 0; mask <<= 1 { + old := x.i.Load() + k := x.i.And(^mask) + j &= ^mask + if x.i.Load() != j || k != old { + t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i.Load(), j, k, old) + } + } + if x.before != magic32 || x.after != magic32 { + t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32) + } +} + +func TestAndInt64(t *testing.T) { + var x struct { + before int64 + i int64 + after int64 + } + magic64 := int64(magic64) + x.before = magic64 + x.after = magic64 + x.i = -1 + j := x.i + for mask := int64(1); mask != 0; mask <<= 1 { + old := x.i + k := AndInt64(&x.i, ^mask) + j &= ^mask + if x.i != j || k != old { + t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i, j, k, old) + } + } + if x.before != magic64 || x.after != magic64 { + t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic64, magic64) + } +} + +func TestAndInt64Method(t *testing.T) { + var x struct { + 
before int64 + i Int64 + after int64 + } + magic64 := int64(magic64) + x.before = magic64 + x.after = magic64 + x.i.Store(-1) + j := x.i.Load() + for mask := int64(1); mask != 0; mask <<= 1 { + old := x.i.Load() + k := x.i.And(^mask) + j &= ^mask + if x.i.Load() != j || k != old { + t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i.Load(), j, k, old) + } + } + if x.before != magic64 || x.after != magic64 { + t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic64, magic64) + } +} + +func TestAndUint64(t *testing.T) { + var x struct { + before uint64 + i uint64 + after uint64 + } + magic64 := uint64(magic64) + x.before = magic64 + x.after = magic64 + x.i = 0xfffffffffffffff + j := x.i + for mask := uint64(1); mask != 0; mask <<= 1 { + old := x.i + k := AndUint64(&x.i, ^mask) + j &= ^mask + if x.i != j || k != old { + t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i, j, k, old) + } + } + if x.before != magic64 || x.after != magic64 { + t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic64, magic64) + } +} + +func TestAndUint64Method(t *testing.T) { + var x struct { + before uint64 + i Uint64 + after uint64 + } + magic64 := uint64(magic64) + x.before = magic64 + x.after = magic64 + x.i.Store(0xfffffffffffffff) + j := x.i.Load() + for mask := uint64(1); mask != 0; mask <<= 1 { + old := x.i.Load() + k := x.i.And(^mask) + j &= ^mask + if x.i.Load() != j || k != old { + t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i.Load(), j, k, old) + } + } + if x.before != magic64 || x.after != magic64 { + t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic64, magic64) + } +} + +func TestAndUintptr(t *testing.T) { + var x struct { + before uintptr + i uintptr + after uintptr + } + var m uint64 = magic64 + magicptr := uintptr(m) + x.before = magicptr + x.after = magicptr + x.i = ^uintptr(0) + j := x.i + for mask := uintptr(1); mask != 0; mask <<= 1 { + old := x.i + k := AndUintptr(&x.i, ^mask) + j &= ^mask + if x.i 
!= j || k != old { + t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i, j, k, old) + } + } + if x.before != magicptr || x.after != magicptr { + t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magicptr, magicptr) + } +} + +func TestAndUintptrMethod(t *testing.T) { + var x struct { + before uintptr + i Uintptr + after uintptr + } + var m uint64 = magic64 + magicptr := uintptr(m) + x.before = magicptr + x.after = magicptr + x.i.Store(^uintptr(0)) + j := x.i.Load() + for mask := uintptr(1); mask != 0; mask <<= 1 { + old := x.i.Load() + k := x.i.And(^mask) + j &= ^mask + if x.i.Load() != j || k != old { + t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i.Load(), j, k, old) + } + } + if x.before != magicptr || x.after != magicptr { + t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magicptr, magicptr) + } +} + +func TestOrInt32(t *testing.T) { + var x struct { + before int32 + i int32 + after int32 + } + x.before = magic32 + x.after = magic32 + var j int32 + for mask := int32(1); mask != 0; mask <<= 1 { + old := x.i + k := OrInt32(&x.i, mask) + j |= mask + if x.i != j || k != old { + t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i, j, k, old) + } + } + if x.before != magic32 || x.after != magic32 { + t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32) + } +} + +func TestOrInt32Method(t *testing.T) { + var x struct { + before int32 + i Int32 + after int32 + } + x.before = magic32 + x.after = magic32 + var j int32 + for mask := int32(1); mask != 0; mask <<= 1 { + old := x.i.Load() + k := x.i.Or(mask) + j |= mask + if x.i.Load() != j || k != old { + t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i.Load(), j, k, old) + } + } + if x.before != magic32 || x.after != magic32 { + t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32) + } +} + +func TestOrUint32(t *testing.T) { + var x struct { + before uint32 + i uint32 + after uint32 + } + x.before = magic32 + x.after = 
magic32 + var j uint32 + for mask := uint32(1); mask != 0; mask <<= 1 { + old := x.i + k := OrUint32(&x.i, mask) + j |= mask + if x.i != j || k != old { + t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i, j, k, old) + } + } + if x.before != magic32 || x.after != magic32 { + t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32) + } +} + +func TestOrUint32Method(t *testing.T) { + var x struct { + before uint32 + i Uint32 + after uint32 + } + x.before = magic32 + x.after = magic32 + var j uint32 + for mask := uint32(1); mask != 0; mask <<= 1 { + old := x.i.Load() + k := x.i.Or(mask) + j |= mask + if x.i.Load() != j || k != old { + t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i.Load(), j, k, old) + } + } + if x.before != magic32 || x.after != magic32 { + t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32) + } +} + +func TestOrInt64(t *testing.T) { + var x struct { + before int64 + i int64 + after int64 + } + magic64 := int64(magic64) + x.before = magic64 + x.after = magic64 + var j int64 + for mask := int64(1); mask != 0; mask <<= 1 { + old := x.i + k := OrInt64(&x.i, mask) + j |= mask + if x.i != j || k != old { + t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i, j, k, old) + } + } + if x.before != magic64 || x.after != magic64 { + t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic64, magic64) + } +} + +func TestOrInt64Method(t *testing.T) { + var x struct { + before int64 + i Int64 + after int64 + } + magic64 := int64(magic64) + x.before = magic64 + x.after = magic64 + var j int64 + for mask := int64(1); mask != 0; mask <<= 1 { + old := x.i.Load() + k := x.i.Or(mask) + j |= mask + if x.i.Load() != j || k != old { + t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i.Load(), j, k, old) + } + } + if x.before != magic64 || x.after != magic64 { + t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic64, magic64) + } +} + +func TestOrUint64(t *testing.T) { + 
var x struct { + before uint64 + i uint64 + after uint64 + } + magic64 := uint64(magic64) + x.before = magic64 + x.after = magic64 + var j uint64 + for mask := uint64(1); mask != 0; mask <<= 1 { + old := x.i + k := OrUint64(&x.i, mask) + j |= mask + if x.i != j || k != old { + t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i, j, k, old) + } + } + if x.before != magic64 || x.after != magic64 { + t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic64, magic64) + } +} + +func TestOrUint64Method(t *testing.T) { + var x struct { + before uint64 + i Uint64 + after uint64 + } + magic64 := uint64(magic64) + x.before = magic64 + x.after = magic64 + var j uint64 + for mask := uint64(1); mask != 0; mask <<= 1 { + old := x.i.Load() + k := x.i.Or(mask) + j |= mask + if x.i.Load() != j || k != old { + t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i.Load(), j, k, old) + } + } + if x.before != magic64 || x.after != magic64 { + t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic64, magic64) + } +} + +func TestOrUintptr(t *testing.T) { + var x struct { + before uintptr + i uintptr + after uintptr + } + var m uint64 = magic64 + magicptr := uintptr(m) + x.before = magicptr + x.after = magicptr + var j uintptr + for mask := uintptr(1); mask != 0; mask <<= 1 { + old := x.i + k := OrUintptr(&x.i, mask) + j |= mask + if x.i != j || k != old { + t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i, j, k, old) + } + } + if x.before != magicptr || x.after != magicptr { + t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magicptr, magicptr) + } +} + +func TestOrUintptrMethod(t *testing.T) { + var x struct { + before uintptr + i Uintptr + after uintptr + } + var m uint64 = magic64 + magicptr := uintptr(m) + x.before = magicptr + x.after = magicptr + var j uintptr + for mask := uintptr(1); mask != 0; mask <<= 1 { + old := x.i.Load() + k := x.i.Or(mask) + j |= mask + if x.i.Load() != j || k != old { + t.Fatalf("mask=%d i=%d j=%d 
k=%d old=%d", mask, x.i.Load(), j, k, old) + } + } + if x.before != magicptr || x.after != magicptr { + t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magicptr, magicptr) + } +} + func TestCompareAndSwapInt32(t *testing.T) { var x struct { before int32 diff --git a/src/sync/atomic/doc.go b/src/sync/atomic/doc.go index c22d1159af..1f7f9b277e 100644 --- a/src/sync/atomic/doc.go +++ b/src/sync/atomic/doc.go @@ -139,6 +139,56 @@ func AddUint64(addr *uint64, delta uint64) (new uint64) // Consider using the more ergonomic and less error-prone [Uintptr.Add] instead. func AddUintptr(addr *uintptr, delta uintptr) (new uintptr) +// AndInt32 atomically performs a bitwise AND operation on *addr using the bitmask provided as mask +// and returns the old value. +// Consider using the more ergonomic and less error-prone [Int32.And] instead. +func AndInt32(addr *int32, mask int32) (old int32) + +// AndUint32 atomically performs a bitwise AND operation on *addr using the bitmask provided as mask +// and returns the old value. +// Consider using the more ergonomic and less error-prone [Uint32.And] instead. +func AndUint32(addr *uint32, mask uint32) (old uint32) + +// AndInt64 atomically performs a bitwise AND operation on *addr using the bitmask provided as mask +// and returns the old value. +// Consider using the more ergonomic and less error-prone [Int64.And] instead. +func AndInt64(addr *int64, mask int64) (old int64) + +// AndUint64 atomically performs a bitwise AND operation on *addr using the bitmask provided as mask +// and returns the old. +// Consider using the more ergonomic and less error-prone [Uint64.And] instead. +func AndUint64(addr *uint64, mask uint64) (old uint64) + +// AndUintptr atomically performs a bitwise AND operation on *addr using the bitmask provided as mask +// and returns the old value. +// Consider using the more ergonomic and less error-prone [Uintptr.And] instead. 
+func AndUintptr(addr *uintptr, mask uintptr) (old uintptr) + +// OrInt32 atomically performs a bitwise OR operation on *addr using the bitmask provided as mask +// and returns the old value. +// Consider using the more ergonomic and less error-prone [Int32.Or] instead. +func OrInt32(addr *int32, mask int32) (old int32) + +// OrUint32 atomically performs a bitwise OR operation on *addr using the bitmask provided as mask +// and returns the old value. +// Consider using the more ergonomic and less error-prone [Uint32.Or] instead. +func OrUint32(addr *uint32, mask uint32) (old uint32) + +// OrInt64 atomically performs a bitwise OR operation on *addr using the bitmask provided as mask +// and returns the old value. +// Consider using the more ergonomic and less error-prone [Int64.Or] instead. +func OrInt64(addr *int64, mask int64) (old int64) + +// OrUint64 atomically performs a bitwise OR operation on *addr using the bitmask provided as mask +// and returns the old value. +// Consider using the more ergonomic and less error-prone [Uint64.Or] instead. +func OrUint64(addr *uint64, mask uint64) (old uint64) + +// OrUintptr atomically performs a bitwise OR operation on *addr using the bitmask provided as mask +// and returns the old value. +// Consider using the more ergonomic and less error-prone [Uintptr.Or] instead. +func OrUintptr(addr *uintptr, mask uintptr) (old uintptr) + // LoadInt32 atomically loads *addr. // Consider using the more ergonomic and less error-prone [Int32.Load] instead. func LoadInt32(addr *int32) (val int32) diff --git a/src/sync/atomic/type.go b/src/sync/atomic/type.go index 179fa93092..7d2b6805bc 100644 --- a/src/sync/atomic/type.go +++ b/src/sync/atomic/type.go @@ -87,6 +87,14 @@ func (x *Int32) CompareAndSwap(old, new int32) (swapped bool) { // Add atomically adds delta to x and returns the new value. 
func (x *Int32) Add(delta int32) (new int32) { return AddInt32(&x.v, delta) } +// And atomically performs a bitwise AND operation on x using the bitmask +// provided as mask and returns the old value. +func (x *Int32) And(mask int32) (old int32) { return AndInt32(&x.v, mask) } + +// Or atomically performs a bitwise OR operation on x using the bitmask +// provided as mask and returns the old value. +func (x *Int32) Or(mask int32) (old int32) { return OrInt32(&x.v, mask) } + // An Int64 is an atomic int64. The zero value is zero. type Int64 struct { _ noCopy @@ -111,6 +119,14 @@ func (x *Int64) CompareAndSwap(old, new int64) (swapped bool) { // Add atomically adds delta to x and returns the new value. func (x *Int64) Add(delta int64) (new int64) { return AddInt64(&x.v, delta) } +// And atomically performs a bitwise AND operation on x using the bitmask +// provided as mask and returns the old value. +func (x *Int64) And(mask int64) (old int64) { return AndInt64(&x.v, mask) } + +// Or atomically performs a bitwise OR operation on x using the bitmask +// provided as mask and returns the old value. +func (x *Int64) Or(mask int64) (old int64) { return OrInt64(&x.v, mask) } + // A Uint32 is an atomic uint32. The zero value is zero. type Uint32 struct { _ noCopy @@ -134,6 +150,14 @@ func (x *Uint32) CompareAndSwap(old, new uint32) (swapped bool) { // Add atomically adds delta to x and returns the new value. func (x *Uint32) Add(delta uint32) (new uint32) { return AddUint32(&x.v, delta) } +// And atomically performs a bitwise AND operation on x using the bitmask +// provided as mask and returns the old value. +func (x *Uint32) And(mask uint32) (old uint32) { return AndUint32(&x.v, mask) } + +// Or atomically performs a bitwise OR operation on x using the bitmask +// provided as mask and returns the old value. +func (x *Uint32) Or(mask uint32) (new uint32) { return OrUint32(&x.v, mask) } + // A Uint64 is an atomic uint64. The zero value is zero. 
type Uint64 struct { _ noCopy @@ -158,6 +182,14 @@ func (x *Uint64) CompareAndSwap(old, new uint64) (swapped bool) { // Add atomically adds delta to x and returns the new value. func (x *Uint64) Add(delta uint64) (new uint64) { return AddUint64(&x.v, delta) } +// And atomically performs a bitwise AND operation on x using the bitmask +// provided as mask and returns the old value. +func (x *Uint64) And(mask uint64) (old uint64) { return AndUint64(&x.v, mask) } + +// Or atomically performs a bitwise OR operation on x using the bitmask +// provided as mask and returns the old value. +func (x *Uint64) Or(mask uint64) (new uint64) { return OrUint64(&x.v, mask) } + // A Uintptr is an atomic uintptr. The zero value is zero. type Uintptr struct { _ noCopy @@ -181,6 +213,14 @@ func (x *Uintptr) CompareAndSwap(old, new uintptr) (swapped bool) { // Add atomically adds delta to x and returns the new value. func (x *Uintptr) Add(delta uintptr) (new uintptr) { return AddUintptr(&x.v, delta) } +// And atomically performs a bitwise AND operation on x using the bitmask +// provided as mask and returns the old value. +func (x *Uintptr) And(mask uintptr) (old uintptr) { return AndUintptr(&x.v, mask) } + +// Or atomically performs a bitwise OR operation on x using the bitmask +// provided as mask and returns the updated value after the OR operation. +func (x *Uintptr) Or(mask uintptr) (old uintptr) { return OrUintptr(&x.v, mask) } + // noCopy may be added to structs which must not be copied // after the first use. // |
