about summary refs log tree commit diff
path: root/test/codegen/bits.go
diff options
context:
space:
mode:
Diffstat (limited to 'test/codegen/bits.go')
-rw-r--r--  test/codegen/bits.go  328
1 file changed, 222 insertions, 106 deletions
diff --git a/test/codegen/bits.go b/test/codegen/bits.go
index 39969dcdb2..d9c567b078 100644
--- a/test/codegen/bits.go
+++ b/test/codegen/bits.go
@@ -8,274 +8,387 @@ package codegen
import "math/bits"
-/************************************
- * 64-bit instructions
- ************************************/
+//
+// 64 bit instructions
+//
-func bitcheck64_constleft(a uint64) (n int) {
- // amd64:"BTQ [$]63"
+func bitsCheckConstLeftShiftU64(a uint64) (n int) {
+ // amd64:"BTQ [$]63,"
+ // arm64:"TBNZ [$]63,"
+ // riscv64:"MOV [$]" "AND" "BNEZ"
if a&(1<<63) != 0 {
return 1
}
- // amd64:"BTQ [$]60"
+ // amd64:"BTQ [$]60,"
+ // arm64:"TBNZ [$]60,"
+ // riscv64:"MOV [$]" "AND" "BNEZ"
if a&(1<<60) != 0 {
return 1
}
- // amd64:"BTL [$]0"
+ // amd64:"BTL [$]0,"
+ // arm64:"TBZ [$]0,"
+ // riscv64:"ANDI" "BEQZ"
if a&(1<<0) != 0 {
return 1
}
return 0
}
-func bitcheck64_constright(a [8]uint64) (n int) {
- // amd64:"BTQ [$]63"
+func bitsCheckConstRightShiftU64(a [8]uint64) (n int) {
+ // amd64:"BTQ [$]63,"
+ // arm64:"LSR [$]63," "TBNZ [$]0,"
+ // riscv64:"SRLI" "ANDI" "BNEZ"
if (a[0]>>63)&1 != 0 {
return 1
}
- // amd64:"BTQ [$]63"
+ // amd64:"BTQ [$]63,"
+ // arm64:"LSR [$]63," "CBNZ"
+ // riscv64:"SRLI" "BNEZ"
if a[1]>>63 != 0 {
return 1
}
- // amd64:"BTQ [$]63"
+ // amd64:"BTQ [$]63,"
+ // arm64:"LSR [$]63," "CBZ"
+ // riscv64:"SRLI" "BEQZ"
if a[2]>>63 == 0 {
return 1
}
- // amd64:"BTQ [$]60"
+ // amd64:"BTQ [$]60,"
+ // arm64:"LSR [$]60," "TBZ [$]0,"
+ // riscv64:"SRLI", "ANDI" "BEQZ"
if (a[3]>>60)&1 == 0 {
return 1
}
- // amd64:"BTL [$]1"
+ // amd64:"BTL [$]1,"
+ // arm64:"LSR [$]1," "TBZ [$]0,"
+ // riscv64:"SRLI" "ANDI" "BEQZ"
if (a[4]>>1)&1 == 0 {
return 1
}
- // amd64:"BTL [$]0"
+ // amd64:"BTL [$]0,"
+ // arm64:"TBZ [$]0," -"LSR"
+ // riscv64:"ANDI" "BEQZ" -"SRLI"
if (a[5]>>0)&1 == 0 {
return 1
}
- // amd64:"BTL [$]7"
+ // amd64:"BTL [$]7,"
+ // arm64:"LSR [$]5," "TBNZ [$]2,"
+ // riscv64:"SRLI" "ANDI" "BNEZ"
if (a[6]>>5)&4 == 0 {
return 1
}
return 0
}
-func bitcheck64_var(a, b uint64) (n int) {
+func bitsCheckVarU64(a, b uint64) (n int) {
// amd64:"BTQ"
+ // arm64:"MOVD [$]1," "LSL" "TST"
+ // riscv64:"ANDI [$]63," "SLL " "AND "
if a&(1<<(b&63)) != 0 {
return 1
}
- // amd64:"BTQ" -"BT. [$]0"
+ // amd64:"BTQ" -"BT. [$]0,"
+ // arm64:"LSR" "TBZ [$]0,"
+ // riscv64:"ANDI [$]63," "SRL" "ANDI [$]1,"
if (b>>(a&63))&1 != 0 {
return 1
}
return 0
}
-func bitcheck64_mask(a uint64) (n int) {
- // amd64:"BTQ [$]63"
+func bitsCheckMaskU64(a uint64) (n int) {
+ // amd64:"BTQ [$]63,"
+ // arm64:"TBNZ [$]63,"
+ // riscv64:"MOV [$]" "AND" "BNEZ"
if a&0x8000000000000000 != 0 {
return 1
}
- // amd64:"BTQ [$]59"
+ // amd64:"BTQ [$]59,"
+ // arm64:"TBNZ [$]59,"
+ // riscv64:"MOV [$]" "AND" "BNEZ"
if a&0x800000000000000 != 0 {
return 1
}
- // amd64:"BTL [$]0"
+ // amd64:"BTL [$]0,"
+ // arm64:"TBZ [$]0,"
+ // riscv64:"ANDI" "BEQZ"
if a&0x1 != 0 {
return 1
}
return 0
}
-func biton64(a, b uint64) (n uint64) {
+func bitsSetU64(a, b uint64) (n uint64) {
// amd64:"BTSQ"
+ // arm64:"MOVD [$]1," "LSL" "ORR"
+ // riscv64:"ANDI" "SLL" "OR"
n += b | (1 << (a & 63))
- // amd64:"BTSQ [$]63"
+ // amd64:"BTSQ [$]63,"
+ // arm64:"ORR [$]-9223372036854775808,"
+ // riscv64:"MOV [$]" "OR "
n += a | (1 << 63)
- // amd64:"BTSQ [$]60"
+ // amd64:"BTSQ [$]60,"
+ // arm64:"ORR [$]1152921504606846976,"
+ // riscv64:"MOV [$]" "OR "
n += a | (1 << 60)
- // amd64:"ORQ [$]1"
+ // amd64:"ORQ [$]1,"
+ // arm64:"ORR [$]1,"
+ // riscv64:"ORI"
n += a | (1 << 0)
return n
}
-func bitoff64(a, b uint64) (n uint64) {
+func bitsClearU64(a, b uint64) (n uint64) {
// amd64:"BTRQ"
+ // arm64:"MOVD [$]1," "LSL" "BIC"
+ // riscv64:"ANDI" "SLL" "ANDN"
n += b &^ (1 << (a & 63))
- // amd64:"BTRQ [$]63"
+ // amd64:"BTRQ [$]63,"
+ // arm64:"AND [$]9223372036854775807,"
+ // riscv64:"MOV [$]" "AND "
n += a &^ (1 << 63)
- // amd64:"BTRQ [$]60"
+ // amd64:"BTRQ [$]60,"
+ // arm64:"AND [$]-1152921504606846977,"
+ // riscv64:"MOV [$]" "AND "
n += a &^ (1 << 60)
// amd64:"ANDQ [$]-2"
+ // arm64:"AND [$]-2"
+ // riscv64:"ANDI [$]-2"
n += a &^ (1 << 0)
return n
}
-func clearLastBit(x int64, y int32) (int64, int32) {
- // amd64:"ANDQ [$]-2"
+func bitsClearLowest(x int64, y int32) (int64, int32) {
+ // amd64:"ANDQ [$]-2,"
+ // arm64:"AND [$]-2,"
+ // riscv64:"ANDI [$]-2,"
a := (x >> 1) << 1
- // amd64:"ANDL [$]-2"
+ // amd64:"ANDL [$]-2,"
+ // arm64:"AND [$]-2,"
+ // riscv64:"ANDI [$]-2,"
b := (y >> 1) << 1
return a, b
}
-func bitcompl64(a, b uint64) (n uint64) {
+func bitsFlipU64(a, b uint64) (n uint64) {
// amd64:"BTCQ"
+ // arm64:"MOVD [$]1," "LSL" "EOR"
+ // riscv64:"ANDI" "SLL" "XOR "
n += b ^ (1 << (a & 63))
- // amd64:"BTCQ [$]63"
+ // amd64:"BTCQ [$]63,"
+ // arm64:"EOR [$]-9223372036854775808,"
+ // riscv64:"MOV [$]" "XOR "
n += a ^ (1 << 63)
- // amd64:"BTCQ [$]60"
+ // amd64:"BTCQ [$]60,"
+ // arm64:"EOR [$]1152921504606846976,"
+ // riscv64:"MOV [$]" "XOR "
n += a ^ (1 << 60)
- // amd64:"XORQ [$]1"
+ // amd64:"XORQ [$]1,"
+ // arm64:"EOR [$]1,"
+ // riscv64:"XORI [$]1,"
n += a ^ (1 << 0)
return n
}
-/************************************
- * 32-bit instructions
- ************************************/
+//
+// 32 bit instructions
+//
-func bitcheck32_constleft(a uint32) (n int) {
- // amd64:"BTL [$]31"
+func bitsCheckConstShiftLeftU32(a uint32) (n int) {
+ // amd64:"BTL [$]31,"
+ // arm64:"TBNZ [$]31,"
+ // riscv64:"MOV [$]" "AND" "BNEZ"
if a&(1<<31) != 0 {
return 1
}
- // amd64:"BTL [$]28"
+ // amd64:"BTL [$]28,"
+ // arm64:"TBNZ [$]28,"
+ // riscv64:"ANDI" "BNEZ"
if a&(1<<28) != 0 {
return 1
}
- // amd64:"BTL [$]0"
+ // amd64:"BTL [$]0,"
+ // arm64:"TBZ [$]0,"
+ // riscv64:"ANDI" "BEQZ"
if a&(1<<0) != 0 {
return 1
}
return 0
}
-func bitcheck32_constright(a [8]uint32) (n int) {
- // amd64:"BTL [$]31"
+func bitsCheckConstRightShiftU32(a [8]uint32) (n int) {
+ // amd64:"BTL [$]31,"
+ // arm64:"UBFX [$]31," "CBNZW"
+ // riscv64:"SRLI" "ANDI" "BNEZ"
if (a[0]>>31)&1 != 0 {
return 1
}
- // amd64:"BTL [$]31"
+ // amd64:"BTL [$]31,"
+ // arm64:"UBFX [$]31," "CBNZW"
+ // riscv64:"SRLI" "BNEZ"
if a[1]>>31 != 0 {
return 1
}
- // amd64:"BTL [$]31"
+ // amd64:"BTL [$]31,"
+ // arm64:"UBFX [$]31," "CBZW"
+ // riscv64:"SRLI" "BEQZ"
if a[2]>>31 == 0 {
return 1
}
- // amd64:"BTL [$]28"
+ // amd64:"BTL [$]28,"
+ // arm64:"UBFX [$]28," "TBZ"
+ // riscv64:"SRLI" "ANDI" "BEQZ"
if (a[3]>>28)&1 == 0 {
return 1
}
- // amd64:"BTL [$]1"
+ // amd64:"BTL [$]1,"
+ // arm64:"UBFX [$]1," "TBZ"
+ // riscv64:"SRLI" "ANDI" "BEQZ"
if (a[4]>>1)&1 == 0 {
return 1
}
- // amd64:"BTL [$]0"
+ // amd64:"BTL [$]0,"
+ // arm64:"TBZ" -"UBFX" -"SRL"
+ // riscv64:"ANDI" "BEQZ" -"SRLI "
if (a[5]>>0)&1 == 0 {
return 1
}
- // amd64:"BTL [$]7"
+ // amd64:"BTL [$]7,"
+ // arm64:"UBFX [$]5," "TBNZ"
+ // riscv64:"SRLI" "ANDI" "BNEZ"
if (a[6]>>5)&4 == 0 {
return 1
}
return 0
}
-func bitcheck32_var(a, b uint32) (n int) {
+func bitsCheckVarU32(a, b uint32) (n int) {
// amd64:"BTL"
+ // arm64:"AND [$]31," "MOVD [$]1," "LSL" "TSTW"
+ // riscv64:"ANDI [$]31," "SLL " "AND "
if a&(1<<(b&31)) != 0 {
return 1
}
// amd64:"BTL" -"BT. [$]0"
+ // arm64:"AND [$]31," "LSR" "TBZ"
+ // riscv64:"ANDI [$]31," "SRLW " "ANDI [$]1,"
if (b>>(a&31))&1 != 0 {
return 1
}
return 0
}
-func bitcheck32_mask(a uint32) (n int) {
- // amd64:"BTL [$]31"
+func bitsCheckMaskU32(a uint32) (n int) {
+ // amd64:"BTL [$]31,"
+ // arm64:"TBNZ [$]31,"
+ // riscv64:"MOV [$]" "AND" "BNEZ"
if a&0x80000000 != 0 {
return 1
}
- // amd64:"BTL [$]27"
+ // amd64:"BTL [$]27,"
+ // arm64:"TBNZ [$]27,"
+ // riscv64:"ANDI" "BNEZ"
if a&0x8000000 != 0 {
return 1
}
- // amd64:"BTL [$]0"
+ // amd64:"BTL [$]0,"
+ // arm64:"TBZ [$]0,"
+ // riscv64:"ANDI" "BEQZ"
if a&0x1 != 0 {
return 1
}
return 0
}
-func biton32(a, b uint32) (n uint32) {
+func bitsSetU32(a, b uint32) (n uint32) {
// amd64:"BTSL"
+ // arm64:"AND [$]31," "MOVD [$]1," "LSL" "ORR"
+ // riscv64:"ANDI" "SLL" "OR"
n += b | (1 << (a & 31))
- // amd64:"ORL [$]-2147483648"
+ // amd64:"ORL [$]-2147483648,"
+ // arm64:"ORR [$]-2147483648,"
+ // riscv64:"ORI [$]-2147483648,"
n += a | (1 << 31)
- // amd64:"ORL [$]268435456"
+ // amd64:"ORL [$]268435456,"
+ // arm64:"ORR [$]268435456,"
+ // riscv64:"ORI [$]268435456,"
n += a | (1 << 28)
- // amd64:"ORL [$]1"
+ // amd64:"ORL [$]1,"
+ // arm64:"ORR [$]1,"
+ // riscv64:"ORI [$]1,"
n += a | (1 << 0)
return n
}
-func bitoff32(a, b uint32) (n uint32) {
+func bitsClearU32(a, b uint32) (n uint32) {
// amd64:"BTRL"
+ // arm64:"AND [$]31," "MOVD [$]1," "LSL" "BIC"
+ // riscv64:"ANDI" "SLL" "ANDN"
n += b &^ (1 << (a & 31))
- // amd64:"ANDL [$]2147483647"
+ // amd64:"ANDL [$]2147483647,"
+ // arm64:"AND [$]2147483647,"
+ // riscv64:"ANDI [$]2147483647,"
n += a &^ (1 << 31)
- // amd64:"ANDL [$]-268435457"
+ // amd64:"ANDL [$]-268435457,"
+ // arm64:"AND [$]-268435457,"
+ // riscv64:"ANDI [$]-268435457,"
n += a &^ (1 << 28)
- // amd64:"ANDL [$]-2"
+ // amd64:"ANDL [$]-2,"
+ // arm64:"AND [$]-2,"
+ // riscv64:"ANDI [$]-2,"
n += a &^ (1 << 0)
return n
}
-func bitcompl32(a, b uint32) (n uint32) {
+func bitsFlipU32(a, b uint32) (n uint32) {
// amd64:"BTCL"
+ // arm64:"AND [$]31," "MOVD [$]1," "LSL" "EOR"
+ // riscv64:"ANDI" "SLL" "XOR "
n += b ^ (1 << (a & 31))
- // amd64:"XORL [$]-2147483648"
+ // amd64:"XORL [$]-2147483648,"
+ // arm64:"EOR [$]-2147483648,"
+ // riscv64:"XORI [$]-2147483648,"
n += a ^ (1 << 31)
- // amd64:"XORL [$]268435456"
+ // amd64:"XORL [$]268435456,"
+ // arm64:"EOR [$]268435456,"
+ // riscv64:"XORI [$]268435456,"
n += a ^ (1 << 28)
- // amd64:"XORL [$]1"
+ // amd64:"XORL [$]1,"
+ // arm64:"EOR [$]1,"
+ // riscv64:"XORI [$]1,"
n += a ^ (1 << 0)
return n
}
-// check direct operation on memory with constant and shifted constant sources
-func bitOpOnMem(a []uint32, b, c, d uint32) {
+func bitsOpOnMem(a []uint32, b, c, d uint32) {
+ // check direct operation on memory with constant
+
// amd64:`ANDL\s[$]200,\s\([A-Z][A-Z0-9]+\)`
a[0] &= 200
// amd64:`ORL\s[$]220,\s4\([A-Z][A-Z0-9]+\)`
@@ -284,24 +397,24 @@ func bitOpOnMem(a []uint32, b, c, d uint32) {
a[2] ^= 240
}
-func bitcheckMostNegative(b uint8) bool {
+func bitsCheckMostNegative(b uint8) bool {
// amd64:"TESTB"
+ // arm64:"TSTW" "CSET"
+ // riscv64:"ANDI [$]128," "SNEZ" -"ADDI"
return b&0x80 == 0x80
}
-// Check AND masking on arm64 (Issue #19857)
-
-func and_mask_1(a uint64) uint64 {
+func bitsIssue19857a(a uint64) uint64 {
// arm64:`AND `
return a & ((1 << 63) - 1)
}
-func and_mask_2(a uint64) uint64 {
+func bitsIssue19857b(a uint64) uint64 {
// arm64:`AND `
return a & (1 << 63)
}
-func and_mask_3(a, b uint32) (uint32, uint32) {
+func bitsIssue19857c(a, b uint32) (uint32, uint32) {
// arm/7:`BIC`,-`AND`
a &= 0xffffaaaa
// arm/7:`BFC`,-`AND`,-`BIC`
@@ -309,34 +422,39 @@ func and_mask_3(a, b uint32) (uint32, uint32) {
return a, b
}
-// Check generation of arm64 BIC/EON/ORN instructions
-
-func op_bic(x, y uint32) uint32 {
+func bitsAndNot(x, y uint32) uint32 {
// arm64:`BIC `,-`AND`
+ // loong64:"ANDN " -"AND "
+ // riscv64:"ANDN" -"AND "
return x &^ y
}
-func op_eon(x, y, z uint32, a []uint32, n, m uint64) uint64 {
+func bitsXorNot(x, y, z uint32, a []uint32, n, m uint64) uint64 {
// arm64:`EON `,-`EOR`,-`MVN`
+ // riscv64:"XNOR " -"MOV [$]" -"XOR"
a[0] = x ^ (y ^ 0xffffffff)
// arm64:`EON `,-`EOR`,-`MVN`
+ // riscv64:"XNOR" -"XOR"
a[1] = ^(y ^ z)
// arm64:`EON `,-`XOR`
+ // riscv64:"XNOR" -"XOR" -"NOT"
a[2] = x ^ ^z
// arm64:`EON `,-`EOR`,-`MVN`
+ // riscv64:"XNOR" -"MOV [$]" -"XOR"
return n ^ (m ^ 0xffffffffffffffff)
}
-func op_orn(x, y uint32) uint32 {
- // arm64:`ORN `,-`ORR`
- // loong64:"ORN" ,-"OR "
+func bitsOrNot(x, y uint32) uint32 {
+ // arm64:"ORN " -"ORR"
+ // loong64:"ORN" -"OR "
+ // riscv64:"ORN" -"OR "
return x | ^y
}
-func op_nor(x int64, a []int64) {
+func bitsNotOr(x int64, a []int64) {
// loong64: "MOVV [$]0" "NOR R"
a[0] = ^(0x1234 | x)
// loong64:"NOR" -"XOR"
@@ -345,64 +463,60 @@ func op_nor(x int64, a []int64) {
a[2] = ^(0x12 | 0x34)
}
-func op_andn(x, y uint32) uint32 {
- // loong64:"ANDN " -"AND "
- return x &^ y
-}
-
-// check bitsets
-func bitSetPowerOf2Test(x int) bool {
+func bitsSetPowerOf2Test(x int) bool {
// amd64:"BTL [$]3"
+ // riscv64:"ANDI [$]8," "SNEZ" -"ADDI"
return x&8 == 8
}
-func bitSetTest(x int) bool {
+func bitsSetTest(x int) bool {
// amd64:"ANDL [$]9, AX"
// amd64:"CMPQ AX, [$]9"
+ // riscv64:"ANDI [$]9," "ADDI [$]-9," "SEQZ"
return x&9 == 9
}
-// mask contiguous one bits
-func cont1Mask64U(x uint64) uint64 {
+func bitsMaskContiguousOnes64U(x uint64) uint64 {
// s390x:"RISBGZ [$]16, [$]47, [$]0,"
return x & 0x0000ffffffff0000
}
-// mask contiguous zero bits
-func cont0Mask64U(x uint64) uint64 {
+func bitsMaskContiguousZeroes64U(x uint64) uint64 {
// s390x:"RISBGZ [$]48, [$]15, [$]0,"
return x & 0xffff00000000ffff
}
-func issue44228a(a []int64, i int) bool {
+func bitsIssue44228a(a []int64, i int) bool {
// amd64: "BTQ", -"SHL"
return a[i>>6]&(1<<(i&63)) != 0
}
-func issue44228b(a []int32, i int) bool {
+
+func bitsIssue44228b(a []int32, i int) bool {
// amd64: "BTL", -"SHL"
return a[i>>5]&(1<<(i&31)) != 0
}
-func issue48467(x, y uint64) uint64 {
+func bitsIssue48467(x, y uint64) uint64 {
// arm64: -"NEG"
d, borrow := bits.Sub64(x, y, 0)
return x - d&(-borrow)
}
-func foldConst(x, y uint64) uint64 {
+func bitsFoldConst(x, y uint64) uint64 {
// arm64: "ADDS [$]7" -"MOVD [$]7"
// ppc64x: "ADDC [$]7,"
d, b := bits.Add64(x, 7, 0)
return b & d
}
-func foldConstOutOfRange(a uint64) uint64 {
+func bitsFoldConstOutOfRange(a uint64) uint64 {
// arm64: "MOVD [$]19088744" -"ADD [$]19088744"
return a + 0x1234568
}
-// Verify sign-extended values are not zero-extended under a bit mask (#61297)
-func signextendAndMask8to64(a int8) (s, z uint64) {
+func bitsSignExtendAndMask8to64U(a int8) (s, z uint64) {
+ // Verify sign-extended values are not zero-extended under a bit mask (#61297)
+
// ppc64x: "MOVB", "ANDCC [$]1015,"
s = uint64(a) & 0x3F7
// ppc64x: -"MOVB", "ANDCC [$]247,"
@@ -410,8 +524,9 @@ func signextendAndMask8to64(a int8) (s, z uint64) {
return
}
-// Verify zero-extended values are not sign-extended under a bit mask (#61297)
-func zeroextendAndMask8to64(a int8, b int16) (x, y uint64) {
+func bitsZeroExtendAndMask8toU64(a int8, b int16) (x, y uint64) {
+ // Verify zero-extended values are not sign-extended under a bit mask (#61297)
+
// ppc64x: -"MOVB ", -"ANDCC", "MOVBZ"
x = uint64(a) & 0xFF
// ppc64x: -"MOVH ", -"ANDCC", "MOVHZ"
@@ -419,8 +534,9 @@ func zeroextendAndMask8to64(a int8, b int16) (x, y uint64) {
return
}
-// Verify rotate and mask instructions, and further simplified instructions for small types
-func bitRotateAndMask(io64 [8]uint64, io32 [4]uint32, io16 [4]uint16, io8 [4]uint8) {
+func bitsRotateAndMask(io64 [8]uint64, io32 [4]uint32, io16 [4]uint16, io8 [4]uint8) {
+ // Verify rotate and mask instructions, and further simplified instructions for small types
+
// ppc64x: "RLDICR [$]0, R[0-9]*, [$]47, R"
io64[0] = io64[0] & 0xFFFFFFFFFFFF0000
// ppc64x: "RLDICL [$]0, R[0-9]*, [$]16, R"