summary refs log tree commit diff stats
path: root/src/cmd/compile/internal/ssa/rewriteRISCV64latelower.go
diff options
context:
space:
mode:
author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-16 19:23:18 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-16 19:23:18 +0000
commit    43a123c1ae6613b3efeed291fa552ecd909d3acf (patch)
tree      fd92518b7024bc74031f78a1cf9e454b65e73665 /src/cmd/compile/internal/ssa/rewriteRISCV64latelower.go
parent    Initial commit. (diff)
download  golang-1.20-43a123c1ae6613b3efeed291fa552ecd909d3acf.tar.xz
download  golang-1.20-43a123c1ae6613b3efeed291fa552ecd909d3acf.zip
Adding upstream version 1.20.14. (tags: upstream/1.20.14, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/cmd/compile/internal/ssa/rewriteRISCV64latelower.go')
-rw-r--r--  src/cmd/compile/internal/ssa/rewriteRISCV64latelower.go  247
1 file changed, 247 insertions(+), 0 deletions(-)
diff --git a/src/cmd/compile/internal/ssa/rewriteRISCV64latelower.go b/src/cmd/compile/internal/ssa/rewriteRISCV64latelower.go
new file mode 100644
index 0000000..04a9691
--- /dev/null
+++ b/src/cmd/compile/internal/ssa/rewriteRISCV64latelower.go
@@ -0,0 +1,247 @@
+// Code generated from _gen/RISCV64latelower.rules; DO NOT EDIT.
+// generated with: cd _gen; go run .
+
+package ssa
+
// rewriteValueRISCV64latelower dispatches v to the rewrite helper for its
// opcode during the RISCV64 late-lowering pass. It reports whether v was
// rewritten. Opcodes without a late-lowering rule fall through and return
// false (no rewrite applied).
func rewriteValueRISCV64latelower(v *Value) bool {
	switch v.Op {
	case OpRISCV64SLLI:
		return rewriteValueRISCV64latelower_OpRISCV64SLLI(v)
	case OpRISCV64SRAI:
		return rewriteValueRISCV64latelower_OpRISCV64SRAI(v)
	case OpRISCV64SRLI:
		return rewriteValueRISCV64latelower_OpRISCV64SRLI(v)
	}
	return false
}
// rewriteValueRISCV64latelower_OpRISCV64SLLI rewrites SLLI (shift left
// logical immediate) values whose argument is a zero-extension
// (MOVBUreg/MOVHUreg/MOVWUreg), replacing the extension+shift with a
// two-shift sequence so the explicit extension instruction disappears.
// It also removes shifts by zero. Reports whether v was rewritten.
//
// Each rule is wrapped in a one-iteration "for { ... }" loop so a failed
// match condition can "break" to the next rule (standard generated-code
// pattern for this file).
func rewriteValueRISCV64latelower_OpRISCV64SLLI(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (SLLI [c] (MOVBUreg x))
	// cond: c <= 56
	// result: (SRLI [56-c] (SLLI <typ.UInt64> [56] x))
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVBUreg {
			break
		}
		x := v_0.Args[0]
		if !(c <= 56) {
			break
		}
		// SLLI by 56 pushes the low byte to the top of the register
		// (discarding the bits MOVBUreg would have cleared); SRLI by
		// 56-c then yields the zero-extended byte shifted left by c.
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(56 - c)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(56)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (SLLI [c] (MOVHUreg x))
	// cond: c <= 48
	// result: (SRLI [48-c] (SLLI <typ.UInt64> [48] x))
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVHUreg {
			break
		}
		x := v_0.Args[0]
		if !(c <= 48) {
			break
		}
		// Same trick for a 16-bit zero-extension: 48 = 64 - 16.
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(48 - c)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(48)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (SLLI [c] (MOVWUreg x))
	// cond: c <= 32
	// result: (SRLI [32-c] (SLLI <typ.UInt64> [32] x))
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_0.Args[0]
		if !(c <= 32) {
			break
		}
		// Same trick for a 32-bit zero-extension: 32 = 64 - 32.
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(32 - c)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(32)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (SLLI [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		// A shift by zero is the identity; replace v with its argument.
		x := v_0
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueRISCV64latelower_OpRISCV64SRAI rewrites SRAI (shift right
// arithmetic immediate) values whose argument is a sign-extension
// (MOVBreg/MOVHreg/MOVWreg), replacing the extension+shift with a
// two-shift sequence so the explicit extension instruction disappears.
// It also removes shifts by zero. Reports whether v was rewritten.
//
// Each rule is wrapped in a one-iteration "for { ... }" loop so a failed
// match condition can "break" to the next rule (standard generated-code
// pattern for this file).
func rewriteValueRISCV64latelower_OpRISCV64SRAI(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (SRAI [c] (MOVBreg x))
	// cond: c < 8
	// result: (SRAI [56+c] (SLLI <typ.Int64> [56] x))
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVBreg {
			break
		}
		x := v_0.Args[0]
		if !(c < 8) {
			break
		}
		// SLLI by 56 places the byte's sign bit at bit 63; SRAI by
		// 56+c then sign-extends it while also shifting right by c.
		// c must stay below the 8-bit width for the result to match.
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(56 + c)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.Int64)
		v0.AuxInt = int64ToAuxInt(56)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (SRAI [c] (MOVHreg x))
	// cond: c < 16
	// result: (SRAI [48+c] (SLLI <typ.Int64> [48] x))
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVHreg {
			break
		}
		x := v_0.Args[0]
		if !(c < 16) {
			break
		}
		// Same trick for a 16-bit sign-extension: 48 = 64 - 16.
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(48 + c)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.Int64)
		v0.AuxInt = int64ToAuxInt(48)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (SRAI [c] (MOVWreg x))
	// cond: c < 32
	// result: (SRAI [32+c] (SLLI <typ.Int64> [32] x))
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWreg {
			break
		}
		x := v_0.Args[0]
		if !(c < 32) {
			break
		}
		// Same trick for a 32-bit sign-extension: 32 = 64 - 32.
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(32 + c)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.Int64)
		v0.AuxInt = int64ToAuxInt(32)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (SRAI [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		// A shift by zero is the identity; replace v with its argument.
		x := v_0
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueRISCV64latelower_OpRISCV64SRLI rewrites SRLI (shift right
// logical immediate) values whose argument is a zero-extension
// (MOVBUreg/MOVHUreg/MOVWUreg), replacing the extension+shift with a
// two-shift sequence so the explicit extension instruction disappears.
// It also removes shifts by zero. Reports whether v was rewritten.
//
// Each rule is wrapped in a one-iteration "for { ... }" loop so a failed
// match condition can "break" to the next rule (standard generated-code
// pattern for this file).
func rewriteValueRISCV64latelower_OpRISCV64SRLI(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (SRLI [c] (MOVBUreg x))
	// cond: c < 8
	// result: (SRLI [56+c] (SLLI <typ.UInt64> [56] x))
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVBUreg {
			break
		}
		x := v_0.Args[0]
		if !(c < 8) {
			break
		}
		// SLLI by 56 pushes the low byte to the top (clearing the bits
		// MOVBUreg would have cleared); SRLI by 56+c shifts it back
		// down with zero fill, net effect: zero-extend then shift by c.
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(56 + c)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(56)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (SRLI [c] (MOVHUreg x))
	// cond: c < 16
	// result: (SRLI [48+c] (SLLI <typ.UInt64> [48] x))
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVHUreg {
			break
		}
		x := v_0.Args[0]
		if !(c < 16) {
			break
		}
		// Same trick for a 16-bit zero-extension: 48 = 64 - 16.
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(48 + c)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(48)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (SRLI [c] (MOVWUreg x))
	// cond: c < 32
	// result: (SRLI [32+c] (SLLI <typ.UInt64> [32] x))
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_0.Args[0]
		if !(c < 32) {
			break
		}
		// Same trick for a 32-bit zero-extension: 32 = 64 - 32.
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(32 + c)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(32)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (SRLI [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		// A shift by zero is the identity; replace v with its argument.
		x := v_0
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteBlockRISCV64latelower rewrites control-flow blocks for the
// RISCV64 late-lowering pass. The rules file defines no block rules for
// this pass, so it always reports false (no rewrite applied).
func rewriteBlockRISCV64latelower(b *Block) bool {
	return false
}