path: root/test/codegen/memcombine.go
author    Keith Randall <khr@golang.org>  2023-05-09 13:25:40 -0700
committer Keith Randall <khr@golang.org>  2023-05-16 19:13:26 +0000
commit    6042a062dc2556a0a1c06d3b85b6c080644da04e (patch)
tree      8e7e0c857c322ef3411cd34e486c17e87778a1ab /test/codegen/memcombine.go
parent    6fc5e7d4b52986f82ec25d5993ff7f8bde8b61f5 (diff)
download  go-git-6042a062dc2556a0a1c06d3b85b6c080644da04e.tar.gz
cmd/compile: make memcombine pass a bit more robust to reassociation of exprs
Be more liberal about expanding the OR tree. Handle any tree shape instead of only a fully left- or right-associative tree. Also remove the tail feature; it isn't ever needed.

Change-Id: If16bebef94b952a604d6069e9be3d9129994cb6f
Reviewed-on: https://go-review.googlesource.com/c/go/+/494056
TryBot-Result: Gopher Robot <gobot@golang.org>
Run-TryBot: Keith Randall <khr@golang.org>
Reviewed-by: Ryan Berger <ryanbberger@gmail.com>
Reviewed-by: Keith Randall <khr@google.com>
Reviewed-by: David Chase <drchase@google.com>
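To make the idea concrete, here is a minimal sketch of "expanding the OR tree" regardless of shape. This is not the compiler's actual memcombine/SSA code; the expr type and orLeaves helper are hypothetical, purely illustrative of walking an OR tree of any association and collecting its leaf operands.

    // Sketch only (assumed names, not the real compiler types): flatten an
    // OR tree of arbitrary shape into its leaves, instead of requiring a
    // fully left- or right-associative chain.
    package sketch

    type expr struct {
        op          string // "OR" for an interior node, anything else for a leaf
        left, right *expr
    }

    // orLeaves appends every non-OR leaf of the tree rooted at e to leaves,
    // recursing into both children of each OR node.
    func orLeaves(e *expr, leaves []*expr) []*expr {
        if e.op == "OR" {
            leaves = orLeaves(e.left, leaves)
            return orLeaves(e.right, leaves)
        }
        return append(leaves, e)
    }

Roughly speaking, once the leaves are collected this way, the pass can check whether they are byte loads at consecutive offsets with matching shifts and combine them into one wide load, no matter how the source expression was parenthesized. The new test cases below exercise exactly such reassociated shapes.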
Diffstat (limited to 'test/codegen/memcombine.go')
-rw-r--r--  test/codegen/memcombine.go | 26
1 file changed, 26 insertions, 0 deletions
diff --git a/test/codegen/memcombine.go b/test/codegen/memcombine.go
index c7a2c7e5ac..0d1c390dfc 100644
--- a/test/codegen/memcombine.go
+++ b/test/codegen/memcombine.go
@@ -338,6 +338,32 @@ func load_be_byte8_uint64_idx8(s []byte, idx int) uint64 {
return uint64(s[idx<<3])<<56 | uint64(s[(idx<<3)+1])<<48 | uint64(s[(idx<<3)+2])<<40 | uint64(s[(idx<<3)+3])<<32 | uint64(s[(idx<<3)+4])<<24 | uint64(s[(idx<<3)+5])<<16 | uint64(s[(idx<<3)+6])<<8 | uint64(s[(idx<<3)+7])
}
+// Some tougher cases for the memcombine pass.
+
+func reassoc_load_uint32(b []byte) uint32 {
+ // amd64:`MOVL\s\([A-Z]+\)`,-`MOV[BW]`,-`OR`
+ return (uint32(b[0]) | uint32(b[1])<<8) | (uint32(b[2])<<16 | uint32(b[3])<<24)
+}
+
+func extrashift_load_uint32(b []byte) uint32 {
+ // amd64:`MOVL\s\([A-Z]+\)`,`SHLL\s[$]2`,-`MOV[BW]`,-`OR`
+ return uint32(b[0])<<2 | uint32(b[1])<<10 | uint32(b[2])<<18 | uint32(b[3])<<26
+
+}
+
+func outoforder_load_uint32(b []byte) uint32 {
+ // amd64:`MOVL\s\([A-Z]+\)`,-`MOV[BW]`,-`OR`
+ return uint32(b[0]) | uint32(b[2])<<16 | uint32(b[1])<<8 | uint32(b[3])<<24
+}
+
+func extraOr_load_uint32(b []byte, x, y uint32) uint32 {
+ // amd64:`ORL\s\([A-Z]+\)`,-`MOV[BW]`
+ return x | binary.LittleEndian.Uint32(b) | y
+ // TODO: Note that
+ // x | uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 | y
+ // doesn't work because it associates in a way that memcombine can't detect.
+}
+
// Check load combining across function calls.
func fcall_byte(a [2]byte) [2]byte {