about summary refs log tree commit diff
path: root/src/cmd/compile
diff options
context:
space:
mode:
authorKeith Randall <khr@golang.org>2025-11-17 15:33:01 -0800
committerKeith Randall <khr@golang.org>2025-11-18 09:37:02 -0800
commit4d0658bb0871806a8c5551063d1ef1d205916ceb (patch)
tree2d6864f113f84085e43c5a74cd510632a97eeccb /src/cmd/compile
parentba634ca5c7f19105c853db5752cc0f6d3ca76e45 (diff)
downloadgo-4d0658bb0871806a8c5551063d1ef1d205916ceb.tar.xz
cmd/compile: prefer fixed registers for values
For this code: func f() (int, int) { return 0, 0 } We currently generate on arm64: MOVD ZR, R0 MOVD R0, R1 This CL changes that to MOVD ZR, R0 MOVD ZR, R1 Probably not a big performance difference, but it makes the generated code clearer. A followup-ish CL from 633075 when the zero fixed register was exposed to the register allocator. Change-Id: I869a92817dcbbca46c900999fab538e76e10ed05 Reviewed-on: https://go-review.googlesource.com/c/go/+/721440 Reviewed-by: Keith Randall <khr@google.com> Reviewed-by: Junyang Shao <shaojunyang@google.com> LUCI-TryBot-Result: Go LUCI <golang-scoped@luci-project-accounts.iam.gserviceaccount.com>
Diffstat (limited to 'src/cmd/compile')
-rw-r--r--src/cmd/compile/internal/ssa/regalloc.go9
1 file changed, 5 insertions, 4 deletions
diff --git a/src/cmd/compile/internal/ssa/regalloc.go b/src/cmd/compile/internal/ssa/regalloc.go
index b5174acbc9..9ed8a0e86c 100644
--- a/src/cmd/compile/internal/ssa/regalloc.go
+++ b/src/cmd/compile/internal/ssa/regalloc.go
@@ -596,17 +596,18 @@ func (s *regAllocState) allocValToReg(v *Value, mask regMask, nospill bool, pos
var c *Value
if vi.regs != 0 {
// Copy from a register that v is already in.
- r2 := pickReg(vi.regs)
var current *Value
- if !s.allocatable.contains(r2) {
- current = v // v is in a fixed register
+ if vi.regs&^s.allocatable != 0 {
+ // v is in a fixed register, prefer that
+ current = v
} else {
+ r2 := pickReg(vi.regs)
if s.regs[r2].v != v {
panic("bad register state")
}
current = s.regs[r2].c
+ s.usedSinceBlockStart |= regMask(1) << r2
}
- s.usedSinceBlockStart |= regMask(1) << r2
c = s.curBlock.NewValue1(pos, OpCopy, v.Type, current)
} else if v.rematerializeable() {
// Rematerialize instead of loading from the spill location.