cmd/compile: use correct type for byteswaps on multi-byte stores
Use the type of the store for the byteswap, not the type of the store's value argument.

Normally when we're storing a 16-bit value, the value being stored is also typed as 16 bits. But sometimes it is typed as something smaller, usually because it is the result of an upcast from a smaller value, and that upcast needs no instructions.

If the type of the store's arg is thinner than the type being stored, and the byteswap'd value uses that thinner type, and the byteswap'd value needs to be spilled & restored, that spill/restore happens using the thinner type, which causes us to lose some of the top bits of the value.

Fixes #59367

Change-Id: If6ce1e8a76f18bf8e9d79871b6caa438bc3cce4d
Reviewed-on: https://go-review.googlesource.com/c/go/+/481395
Reviewed-by: David Chase <drchase@google.com>
Reviewed-by: Cherry Mui <cherryyz@google.com>
Run-TryBot: Keith Randall <khr@golang.org>
TryBot-Result: Gopher Robot <gobot@golang.org>
parent 42866e566a
commit b3bc8620f8
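For context, the miscompile is easiest to reproduce with the pattern exercised by the new test added below (test/fixedbugs/issue59367.go). A minimal sketch of the 16-bit case, condensed from that test's f16; the store16 helper name and comments here are illustrative, not part of the commit:

package main

//go:noinline
func nop() {}

// store16 writes the 16-bit zero-extension of *p into b twice.
// On amd64 the pair of byte stores is combined into a single 16-bit
// store of a byte-swapped value (the ROLWconst rules changed below);
// the nop() call forces that combined value to be spilled and restored
// in between. Before this fix the spill could use the thinner type of
// the store's argument and drop the value's high bits.
//go:noinline
func store16(p *uint8, b []byte) {
	_ = b[1]            // bounds check
	y := uint16(*p)     // byte loaded and zero extended to 16 bits
	b[0] = byte(y >> 8) // first write of both bytes
	b[1] = byte(y)
	nop()               // spill/restore point
	b[0] = byte(y >> 8) // second write, after the restore
	b[1] = byte(y)
}

func main() {
	var b [2]byte
	one := uint8(1)
	store16(&one, b[:])
	if b[1] != 1 {
		println("2-byte value lost") // only fires on an affected compiler
	}
}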
AMD64 rewrite rules:

@@ -1758,13 +1758,13 @@
 x0:(MOVBstore [i-1] {s} p (SHRWconst [8] w) mem))
 && x0.Uses == 1
 && clobber(x0)
-=> (MOVWstore [i-1] {s} p (ROLWconst <w.Type> [8] w) mem)
+=> (MOVWstore [i-1] {s} p (ROLWconst <typ.UInt16> [8] w) mem)
 (MOVBstore [i] {s} p1 w
 x0:(MOVBstore [i] {s} p0 (SHRWconst [8] w) mem))
 && x0.Uses == 1
 && sequentialAddresses(p0, p1, 1)
 && clobber(x0)
-=> (MOVWstore [i] {s} p0 (ROLWconst <w.Type> [8] w) mem)
+=> (MOVWstore [i] {s} p0 (ROLWconst <typ.UInt16> [8] w) mem)

 // Combine stores + shifts into bswap and larger (unaligned) stores
 (MOVBstore [i] {s} p w
@@ -1775,7 +1775,7 @@
 && x1.Uses == 1
 && x2.Uses == 1
 && clobber(x0, x1, x2)
-=> (MOVLstore [i-3] {s} p (BSWAPL <w.Type> w) mem)
+=> (MOVLstore [i-3] {s} p (BSWAPL <typ.UInt32> w) mem)
 (MOVBstore [i] {s} p3 w
 x2:(MOVBstore [i] {s} p2 (SHRLconst [8] w)
 x1:(MOVBstore [i] {s} p1 (SHRLconst [16] w)
@@ -1787,7 +1787,7 @@
 && sequentialAddresses(p1, p2, 1)
 && sequentialAddresses(p2, p3, 1)
 && clobber(x0, x1, x2)
-=> (MOVLstore [i] {s} p0 (BSWAPL <w.Type> w) mem)
+=> (MOVLstore [i] {s} p0 (BSWAPL <typ.UInt32> w) mem)

 (MOVBstore [i] {s} p w
 x6:(MOVBstore [i-1] {s} p (SHRQconst [8] w)
@@ -1805,7 +1805,7 @@
 && x5.Uses == 1
 && x6.Uses == 1
 && clobber(x0, x1, x2, x3, x4, x5, x6)
-=> (MOVQstore [i-7] {s} p (BSWAPQ <w.Type> w) mem)
+=> (MOVQstore [i-7] {s} p (BSWAPQ <typ.UInt64> w) mem)
 (MOVBstore [i] {s} p7 w
 x6:(MOVBstore [i] {s} p6 (SHRQconst [8] w)
 x5:(MOVBstore [i] {s} p5 (SHRQconst [16] w)
@@ -1829,7 +1829,7 @@
 && sequentialAddresses(p5, p6, 1)
 && sequentialAddresses(p6, p7, 1)
 && clobber(x0, x1, x2, x3, x4, x5, x6)
-=> (MOVQstore [i] {s} p0 (BSWAPQ <w.Type> w) mem)
+=> (MOVQstore [i] {s} p0 (BSWAPQ <typ.UInt64> w) mem)

 // Combine constant stores into larger (unaligned) stores.
 (MOVBstoreconst [c] {s} p1 x:(MOVBstoreconst [a] {s} p0 mem))
ARM64 rewrite rules:

@@ -2688,7 +2688,7 @@
 && x5.Uses == 1
 && x6.Uses == 1
 && clobber(x0, x1, x2, x3, x4, x5, x6)
-=> (MOVDstore [i-7] {s} ptr (REV <w.Type> w) mem)
+=> (MOVDstore [i-7] {s} ptr (REV <typ.UInt64> w) mem)
 (MOVBstore [7] {s} p w
 x0:(MOVBstore [6] {s} p (SRLconst [8] w)
 x1:(MOVBstore [5] {s} p (SRLconst [16] w)
@@ -2708,7 +2708,7 @@
 && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1))
 && isSamePtr(p1, p)
 && clobber(x0, x1, x2, x3, x4, x5, x6)
-=> (MOVDstoreidx ptr0 idx0 (REV <w.Type> w) mem)
+=> (MOVDstoreidx ptr0 idx0 (REV <typ.UInt64> w) mem)
 (MOVBstore [i] {s} ptr w
 x0:(MOVBstore [i-1] {s} ptr (UBFX [armBFAuxInt(8, 24)] w)
 x1:(MOVBstore [i-2] {s} ptr (UBFX [armBFAuxInt(16, 16)] w)
@@ -2717,7 +2717,7 @@
 && x1.Uses == 1
 && x2.Uses == 1
 && clobber(x0, x1, x2)
-=> (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem)
+=> (MOVWstore [i-3] {s} ptr (REVW <typ.UInt32> w) mem)
 (MOVBstore [3] {s} p w
 x0:(MOVBstore [2] {s} p (UBFX [armBFAuxInt(8, 24)] w)
 x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (UBFX [armBFAuxInt(16, 16)] w)
@@ -2729,7 +2729,7 @@
 && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1))
 && isSamePtr(p1, p)
 && clobber(x0, x1, x2)
-=> (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem)
+=> (MOVWstoreidx ptr0 idx0 (REVW <typ.UInt32> w) mem)
 (MOVBstoreidx ptr (ADDconst [3] idx) w
 x0:(MOVBstoreidx ptr (ADDconst [2] idx) (UBFX [armBFAuxInt(8, 24)] w)
 x1:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [armBFAuxInt(16, 16)] w)
@@ -2738,7 +2738,7 @@
 && x1.Uses == 1
 && x2.Uses == 1
 && clobber(x0, x1, x2)
-=> (MOVWstoreidx ptr idx (REVW <w.Type> w) mem)
+=> (MOVWstoreidx ptr idx (REVW <typ.UInt32> w) mem)
 (MOVBstoreidx ptr idx w
 x0:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [armBFAuxInt(8, 24)] w)
 x1:(MOVBstoreidx ptr (ADDconst [2] idx) (UBFX [armBFAuxInt(16, 16)] w)
@@ -2756,7 +2756,7 @@
 && x1.Uses == 1
 && x2.Uses == 1
 && clobber(x0, x1, x2)
-=> (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem)
+=> (MOVWstore [i-3] {s} ptr (REVW <typ.UInt32> w) mem)
 (MOVBstore [3] {s} p w
 x0:(MOVBstore [2] {s} p (SRLconst [8] (MOVDreg w))
 x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [16] (MOVDreg w))
@@ -2768,7 +2768,7 @@
 && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1))
 && isSamePtr(p1, p)
 && clobber(x0, x1, x2)
-=> (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem)
+=> (MOVWstoreidx ptr0 idx0 (REVW <typ.UInt32> w) mem)
 (MOVBstore [i] {s} ptr w
 x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] w)
 x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] w)
@@ -2777,7 +2777,7 @@
 && x1.Uses == 1
 && x2.Uses == 1
 && clobber(x0, x1, x2)
-=> (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem)
+=> (MOVWstore [i-3] {s} ptr (REVW <typ.UInt32> w) mem)
 (MOVBstore [3] {s} p w
 x0:(MOVBstore [2] {s} p (SRLconst [8] w)
 x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [16] w)
@@ -2789,31 +2789,31 @@
 && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1))
 && isSamePtr(p1, p)
 && clobber(x0, x1, x2)
-=> (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem)
+=> (MOVWstoreidx ptr0 idx0 (REVW <typ.UInt32> w) mem)
 (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) mem))
 && x.Uses == 1
 && clobber(x)
-=> (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem)
+=> (MOVHstore [i-1] {s} ptr (REV16W <typ.UInt16> w) mem)
 (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] w) mem))
 && x.Uses == 1
 && s == nil
 && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1))
 && clobber(x)
-=> (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem)
+=> (MOVHstoreidx ptr0 idx0 (REV16W <typ.UInt16> w) mem)
 (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (UBFX [armBFAuxInt(8, 8)] w) mem))
 && x.Uses == 1
 && clobber(x)
-=> (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem)
+=> (MOVHstore [i-1] {s} ptr (REV16W <typ.UInt16> w) mem)
 (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (UBFX [armBFAuxInt(8, 8)] w) mem))
 && x.Uses == 1
 && s == nil
 && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1))
 && clobber(x)
-=> (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem)
+=> (MOVHstoreidx ptr0 idx0 (REV16W <typ.UInt16> w) mem)
 (MOVBstoreidx ptr (ADDconst [1] idx) w x:(MOVBstoreidx ptr idx (UBFX [armBFAuxInt(8, 8)] w) mem))
 && x.Uses == 1
 && clobber(x)
-=> (MOVHstoreidx ptr idx (REV16W <w.Type> w) mem)
+=> (MOVHstoreidx ptr idx (REV16W <typ.UInt16> w) mem)
 (MOVBstoreidx ptr idx w x:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [armBFAuxInt(8, 8)] w) mem))
 && x.Uses == 1
 && clobber(x)
@@ -2821,23 +2821,23 @@
 (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] (MOVDreg w)) mem))
 && x.Uses == 1
 && clobber(x)
-=> (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem)
+=> (MOVHstore [i-1] {s} ptr (REV16W <typ.UInt16> w) mem)
 (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] (MOVDreg w)) mem))
 && x.Uses == 1
 && s == nil
 && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1))
 && clobber(x)
-=> (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem)
+=> (MOVHstoreidx ptr0 idx0 (REV16W <typ.UInt16> w) mem)
 (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (UBFX [armBFAuxInt(8, 24)] w) mem))
 && x.Uses == 1
 && clobber(x)
-=> (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem)
+=> (MOVHstore [i-1] {s} ptr (REV16W <typ.UInt16> w) mem)
 (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (UBFX [armBFAuxInt(8, 24)] w) mem))
 && x.Uses == 1
 && s == nil
 && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1))
 && clobber(x)
-=> (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem)
+=> (MOVHstoreidx ptr0 idx0 (REV16W <typ.UInt16> w) mem)

 // FP simplification
 (FNEGS (FMULS x y)) => (FNMULS x y)
Generated AMD64 rewriter:

@@ -10484,7 +10484,7 @@ func rewriteValueAMD64_OpAMD64MOVBstore(v *Value) bool {
 }
 // match: (MOVBstore [i] {s} p w x0:(MOVBstore [i-1] {s} p (SHRWconst [8] w) mem))
 // cond: x0.Uses == 1 && clobber(x0)
-// result: (MOVWstore [i-1] {s} p (ROLWconst <w.Type> [8] w) mem)
+// result: (MOVWstore [i-1] {s} p (ROLWconst <typ.UInt16> [8] w) mem)
 for {
 i := auxIntToInt32(v.AuxInt)
 s := auxToSym(v.Aux)
@@ -10505,7 +10505,7 @@ func rewriteValueAMD64_OpAMD64MOVBstore(v *Value) bool {
 v.reset(OpAMD64MOVWstore)
 v.AuxInt = int32ToAuxInt(i - 1)
 v.Aux = symToAux(s)
-v0 := b.NewValue0(x0.Pos, OpAMD64ROLWconst, w.Type)
+v0 := b.NewValue0(x0.Pos, OpAMD64ROLWconst, typ.UInt16)
 v0.AuxInt = int8ToAuxInt(8)
 v0.AddArg(w)
 v.AddArg3(p, v0, mem)
@@ -10513,7 +10513,7 @@ func rewriteValueAMD64_OpAMD64MOVBstore(v *Value) bool {
 }
 // match: (MOVBstore [i] {s} p1 w x0:(MOVBstore [i] {s} p0 (SHRWconst [8] w) mem))
 // cond: x0.Uses == 1 && sequentialAddresses(p0, p1, 1) && clobber(x0)
-// result: (MOVWstore [i] {s} p0 (ROLWconst <w.Type> [8] w) mem)
+// result: (MOVWstore [i] {s} p0 (ROLWconst <typ.UInt16> [8] w) mem)
 for {
 i := auxIntToInt32(v.AuxInt)
 s := auxToSym(v.Aux)
@@ -10532,7 +10532,7 @@ func rewriteValueAMD64_OpAMD64MOVBstore(v *Value) bool {
 v.reset(OpAMD64MOVWstore)
 v.AuxInt = int32ToAuxInt(i)
 v.Aux = symToAux(s)
-v0 := b.NewValue0(x0.Pos, OpAMD64ROLWconst, w.Type)
+v0 := b.NewValue0(x0.Pos, OpAMD64ROLWconst, typ.UInt16)
 v0.AuxInt = int8ToAuxInt(8)
 v0.AddArg(w)
 v.AddArg3(p0, v0, mem)
@@ -10540,7 +10540,7 @@ func rewriteValueAMD64_OpAMD64MOVBstore(v *Value) bool {
 }
 // match: (MOVBstore [i] {s} p w x2:(MOVBstore [i-1] {s} p (SHRLconst [8] w) x1:(MOVBstore [i-2] {s} p (SHRLconst [16] w) x0:(MOVBstore [i-3] {s} p (SHRLconst [24] w) mem))))
 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)
-// result: (MOVLstore [i-3] {s} p (BSWAPL <w.Type> w) mem)
+// result: (MOVLstore [i-3] {s} p (BSWAPL <typ.UInt32> w) mem)
 for {
 i := auxIntToInt32(v.AuxInt)
 s := auxToSym(v.Aux)
@@ -10585,14 +10585,14 @@ func rewriteValueAMD64_OpAMD64MOVBstore(v *Value) bool {
 v.reset(OpAMD64MOVLstore)
 v.AuxInt = int32ToAuxInt(i - 3)
 v.Aux = symToAux(s)
-v0 := b.NewValue0(x0.Pos, OpAMD64BSWAPL, w.Type)
+v0 := b.NewValue0(x0.Pos, OpAMD64BSWAPL, typ.UInt32)
 v0.AddArg(w)
 v.AddArg3(p, v0, mem)
 return true
 }
 // match: (MOVBstore [i] {s} p3 w x2:(MOVBstore [i] {s} p2 (SHRLconst [8] w) x1:(MOVBstore [i] {s} p1 (SHRLconst [16] w) x0:(MOVBstore [i] {s} p0 (SHRLconst [24] w) mem))))
 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && sequentialAddresses(p0, p1, 1) && sequentialAddresses(p1, p2, 1) && sequentialAddresses(p2, p3, 1) && clobber(x0, x1, x2)
-// result: (MOVLstore [i] {s} p0 (BSWAPL <w.Type> w) mem)
+// result: (MOVLstore [i] {s} p0 (BSWAPL <typ.UInt32> w) mem)
 for {
 i := auxIntToInt32(v.AuxInt)
 s := auxToSym(v.Aux)
@@ -10631,14 +10631,14 @@ func rewriteValueAMD64_OpAMD64MOVBstore(v *Value) bool {
 v.reset(OpAMD64MOVLstore)
 v.AuxInt = int32ToAuxInt(i)
 v.Aux = symToAux(s)
-v0 := b.NewValue0(x0.Pos, OpAMD64BSWAPL, w.Type)
+v0 := b.NewValue0(x0.Pos, OpAMD64BSWAPL, typ.UInt32)
 v0.AddArg(w)
 v.AddArg3(p0, v0, mem)
 return true
 }
 // match: (MOVBstore [i] {s} p w x6:(MOVBstore [i-1] {s} p (SHRQconst [8] w) x5:(MOVBstore [i-2] {s} p (SHRQconst [16] w) x4:(MOVBstore [i-3] {s} p (SHRQconst [24] w) x3:(MOVBstore [i-4] {s} p (SHRQconst [32] w) x2:(MOVBstore [i-5] {s} p (SHRQconst [40] w) x1:(MOVBstore [i-6] {s} p (SHRQconst [48] w) x0:(MOVBstore [i-7] {s} p (SHRQconst [56] w) mem))))))))
 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && clobber(x0, x1, x2, x3, x4, x5, x6)
-// result: (MOVQstore [i-7] {s} p (BSWAPQ <w.Type> w) mem)
+// result: (MOVQstore [i-7] {s} p (BSWAPQ <typ.UInt64> w) mem)
 for {
 i := auxIntToInt32(v.AuxInt)
 s := auxToSym(v.Aux)
@@ -10731,14 +10731,14 @@ func rewriteValueAMD64_OpAMD64MOVBstore(v *Value) bool {
 v.reset(OpAMD64MOVQstore)
 v.AuxInt = int32ToAuxInt(i - 7)
 v.Aux = symToAux(s)
-v0 := b.NewValue0(x0.Pos, OpAMD64BSWAPQ, w.Type)
+v0 := b.NewValue0(x0.Pos, OpAMD64BSWAPQ, typ.UInt64)
 v0.AddArg(w)
 v.AddArg3(p, v0, mem)
 return true
 }
 // match: (MOVBstore [i] {s} p7 w x6:(MOVBstore [i] {s} p6 (SHRQconst [8] w) x5:(MOVBstore [i] {s} p5 (SHRQconst [16] w) x4:(MOVBstore [i] {s} p4 (SHRQconst [24] w) x3:(MOVBstore [i] {s} p3 (SHRQconst [32] w) x2:(MOVBstore [i] {s} p2 (SHRQconst [40] w) x1:(MOVBstore [i] {s} p1 (SHRQconst [48] w) x0:(MOVBstore [i] {s} p0 (SHRQconst [56] w) mem))))))))
 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && sequentialAddresses(p0, p1, 1) && sequentialAddresses(p1, p2, 1) && sequentialAddresses(p2, p3, 1) && sequentialAddresses(p3, p4, 1) && sequentialAddresses(p4, p5, 1) && sequentialAddresses(p5, p6, 1) && sequentialAddresses(p6, p7, 1) && clobber(x0, x1, x2, x3, x4, x5, x6)
-// result: (MOVQstore [i] {s} p0 (BSWAPQ <w.Type> w) mem)
+// result: (MOVQstore [i] {s} p0 (BSWAPQ <typ.UInt64> w) mem)
 for {
 i := auxIntToInt32(v.AuxInt)
 s := auxToSym(v.Aux)
@@ -10817,7 +10817,7 @@ func rewriteValueAMD64_OpAMD64MOVBstore(v *Value) bool {
 v.reset(OpAMD64MOVQstore)
 v.AuxInt = int32ToAuxInt(i)
 v.Aux = symToAux(s)
-v0 := b.NewValue0(x0.Pos, OpAMD64BSWAPQ, w.Type)
+v0 := b.NewValue0(x0.Pos, OpAMD64BSWAPQ, typ.UInt64)
 v0.AddArg(w)
 v.AddArg3(p0, v0, mem)
 return true
Generated ARM64 rewriter:

@@ -8726,6 +8726,7 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 v_0 := v.Args[0]
 b := v.Block
 config := b.Func.Config
+typ := &b.Func.Config.Types
 // match: (MOVBstore [off1] {sym} (ADDconst [off2] ptr) val mem)
 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
 // result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem)
@@ -9396,7 +9397,7 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 }
 // match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] w) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] w) x3:(MOVBstore [i-4] {s} ptr (SRLconst [32] w) x4:(MOVBstore [i-5] {s} ptr (SRLconst [40] w) x5:(MOVBstore [i-6] {s} ptr (SRLconst [48] w) x6:(MOVBstore [i-7] {s} ptr (SRLconst [56] w) mem))))))))
 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && clobber(x0, x1, x2, x3, x4, x5, x6)
-// result: (MOVDstore [i-7] {s} ptr (REV <w.Type> w) mem)
+// result: (MOVDstore [i-7] {s} ptr (REV <typ.UInt64> w) mem)
 for {
 i := auxIntToInt32(v.AuxInt)
 s := auxToSym(v.Aux)
@@ -9489,14 +9490,14 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 v.reset(OpARM64MOVDstore)
 v.AuxInt = int32ToAuxInt(i - 7)
 v.Aux = symToAux(s)
-v0 := b.NewValue0(x6.Pos, OpARM64REV, w.Type)
+v0 := b.NewValue0(x6.Pos, OpARM64REV, typ.UInt64)
 v0.AddArg(w)
 v.AddArg3(ptr, v0, mem)
 return true
 }
 // match: (MOVBstore [7] {s} p w x0:(MOVBstore [6] {s} p (SRLconst [8] w) x1:(MOVBstore [5] {s} p (SRLconst [16] w) x2:(MOVBstore [4] {s} p (SRLconst [24] w) x3:(MOVBstore [3] {s} p (SRLconst [32] w) x4:(MOVBstore [2] {s} p (SRLconst [40] w) x5:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [48] w) x6:(MOVBstoreidx ptr0 idx0 (SRLconst [56] w) mem))))))))
 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, x5, x6)
-// result: (MOVDstoreidx ptr0 idx0 (REV <w.Type> w) mem)
+// result: (MOVDstoreidx ptr0 idx0 (REV <typ.UInt64> w) mem)
 for {
 if auxIntToInt32(v.AuxInt) != 7 {
 break
@@ -9595,7 +9596,7 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 continue
 }
 v.reset(OpARM64MOVDstoreidx)
-v0 := b.NewValue0(x5.Pos, OpARM64REV, w.Type)
+v0 := b.NewValue0(x5.Pos, OpARM64REV, typ.UInt64)
 v0.AddArg(w)
 v.AddArg4(ptr0, idx0, v0, mem)
 return true
@@ -9604,7 +9605,7 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 }
 // match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (UBFX [armBFAuxInt(8, 24)] w) x1:(MOVBstore [i-2] {s} ptr (UBFX [armBFAuxInt(16, 16)] w) x2:(MOVBstore [i-3] {s} ptr (UBFX [armBFAuxInt(24, 8)] w) mem))))
 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)
-// result: (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem)
+// result: (MOVWstore [i-3] {s} ptr (REVW <typ.UInt32> w) mem)
 for {
 i := auxIntToInt32(v.AuxInt)
 s := auxToSym(v.Aux)
@@ -9649,14 +9650,14 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 v.reset(OpARM64MOVWstore)
 v.AuxInt = int32ToAuxInt(i - 3)
 v.Aux = symToAux(s)
-v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type)
+v0 := b.NewValue0(x2.Pos, OpARM64REVW, typ.UInt32)
 v0.AddArg(w)
 v.AddArg3(ptr, v0, mem)
 return true
 }
 // match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (UBFX [armBFAuxInt(8, 24)] w) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (UBFX [armBFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr0 idx0 (UBFX [armBFAuxInt(24, 8)] w) mem))))
 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2)
-// result: (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem)
+// result: (MOVWstoreidx ptr0 idx0 (REVW <typ.UInt32> w) mem)
 for {
 if auxIntToInt32(v.AuxInt) != 3 {
 break
@@ -9707,7 +9708,7 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 continue
 }
 v.reset(OpARM64MOVWstoreidx)
-v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type)
+v0 := b.NewValue0(x1.Pos, OpARM64REVW, typ.UInt32)
 v0.AddArg(w)
 v.AddArg4(ptr0, idx0, v0, mem)
 return true
@@ -9716,7 +9717,7 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 }
 // match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] (MOVDreg w)) x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] (MOVDreg w)) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] (MOVDreg w)) mem))))
 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)
-// result: (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem)
+// result: (MOVWstore [i-3] {s} ptr (REVW <typ.UInt32> w) mem)
 for {
 i := auxIntToInt32(v.AuxInt)
 s := auxToSym(v.Aux)
@@ -9773,14 +9774,14 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 v.reset(OpARM64MOVWstore)
 v.AuxInt = int32ToAuxInt(i - 3)
 v.Aux = symToAux(s)
-v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type)
+v0 := b.NewValue0(x2.Pos, OpARM64REVW, typ.UInt32)
 v0.AddArg(w)
 v.AddArg3(ptr, v0, mem)
 return true
 }
 // match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (SRLconst [8] (MOVDreg w)) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [16] (MOVDreg w)) x2:(MOVBstoreidx ptr0 idx0 (SRLconst [24] (MOVDreg w)) mem))))
 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2)
-// result: (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem)
+// result: (MOVWstoreidx ptr0 idx0 (REVW <typ.UInt32> w) mem)
 for {
 if auxIntToInt32(v.AuxInt) != 3 {
 break
@@ -9843,7 +9844,7 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 continue
 }
 v.reset(OpARM64MOVWstoreidx)
-v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type)
+v0 := b.NewValue0(x1.Pos, OpARM64REVW, typ.UInt32)
 v0.AddArg(w)
 v.AddArg4(ptr0, idx0, v0, mem)
 return true
@@ -9852,7 +9853,7 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 }
 // match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] w) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] w) mem))))
 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)
-// result: (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem)
+// result: (MOVWstore [i-3] {s} ptr (REVW <typ.UInt32> w) mem)
 for {
 i := auxIntToInt32(v.AuxInt)
 s := auxToSym(v.Aux)
@@ -9897,14 +9898,14 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 v.reset(OpARM64MOVWstore)
 v.AuxInt = int32ToAuxInt(i - 3)
 v.Aux = symToAux(s)
-v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type)
+v0 := b.NewValue0(x2.Pos, OpARM64REVW, typ.UInt32)
 v0.AddArg(w)
 v.AddArg3(ptr, v0, mem)
 return true
 }
 // match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (SRLconst [8] w) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [16] w) x2:(MOVBstoreidx ptr0 idx0 (SRLconst [24] w) mem))))
 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2)
-// result: (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem)
+// result: (MOVWstoreidx ptr0 idx0 (REVW <typ.UInt32> w) mem)
 for {
 if auxIntToInt32(v.AuxInt) != 3 {
 break
@@ -9955,7 +9956,7 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 continue
 }
 v.reset(OpARM64MOVWstoreidx)
-v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type)
+v0 := b.NewValue0(x1.Pos, OpARM64REVW, typ.UInt32)
 v0.AddArg(w)
 v.AddArg4(ptr0, idx0, v0, mem)
 return true
@@ -9964,7 +9965,7 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 }
 // match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) mem))
 // cond: x.Uses == 1 && clobber(x)
-// result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem)
+// result: (MOVHstore [i-1] {s} ptr (REV16W <typ.UInt16> w) mem)
 for {
 i := auxIntToInt32(v.AuxInt)
 s := auxToSym(v.Aux)
@@ -9985,14 +9986,14 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 v.reset(OpARM64MOVHstore)
 v.AuxInt = int32ToAuxInt(i - 1)
 v.Aux = symToAux(s)
-v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
+v0 := b.NewValue0(x.Pos, OpARM64REV16W, typ.UInt16)
 v0.AddArg(w)
 v.AddArg3(ptr, v0, mem)
 return true
 }
 // match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] w) mem))
 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
-// result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem)
+// result: (MOVHstoreidx ptr0 idx0 (REV16W <typ.UInt16> w) mem)
 for {
 if auxIntToInt32(v.AuxInt) != 1 {
 break
@@ -10020,7 +10021,7 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 continue
 }
 v.reset(OpARM64MOVHstoreidx)
-v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
+v0 := b.NewValue0(v.Pos, OpARM64REV16W, typ.UInt16)
 v0.AddArg(w)
 v.AddArg4(ptr0, idx0, v0, mem)
 return true
@@ -10029,7 +10030,7 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 }
 // match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (UBFX [armBFAuxInt(8, 8)] w) mem))
 // cond: x.Uses == 1 && clobber(x)
-// result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem)
+// result: (MOVHstore [i-1] {s} ptr (REV16W <typ.UInt16> w) mem)
 for {
 i := auxIntToInt32(v.AuxInt)
 s := auxToSym(v.Aux)
@@ -10050,14 +10051,14 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 v.reset(OpARM64MOVHstore)
 v.AuxInt = int32ToAuxInt(i - 1)
 v.Aux = symToAux(s)
-v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
+v0 := b.NewValue0(x.Pos, OpARM64REV16W, typ.UInt16)
 v0.AddArg(w)
 v.AddArg3(ptr, v0, mem)
 return true
 }
 // match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (UBFX [armBFAuxInt(8, 8)] w) mem))
 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
-// result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem)
+// result: (MOVHstoreidx ptr0 idx0 (REV16W <typ.UInt16> w) mem)
 for {
 if auxIntToInt32(v.AuxInt) != 1 {
 break
@@ -10085,7 +10086,7 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 continue
 }
 v.reset(OpARM64MOVHstoreidx)
-v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
+v0 := b.NewValue0(v.Pos, OpARM64REV16W, typ.UInt16)
 v0.AddArg(w)
 v.AddArg4(ptr0, idx0, v0, mem)
 return true
@@ -10094,7 +10095,7 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 }
 // match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] (MOVDreg w)) mem))
 // cond: x.Uses == 1 && clobber(x)
-// result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem)
+// result: (MOVHstore [i-1] {s} ptr (REV16W <typ.UInt16> w) mem)
 for {
 i := auxIntToInt32(v.AuxInt)
 s := auxToSym(v.Aux)
@@ -10119,14 +10120,14 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 v.reset(OpARM64MOVHstore)
 v.AuxInt = int32ToAuxInt(i - 1)
 v.Aux = symToAux(s)
-v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
+v0 := b.NewValue0(x.Pos, OpARM64REV16W, typ.UInt16)
 v0.AddArg(w)
 v.AddArg3(ptr, v0, mem)
 return true
 }
 // match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] (MOVDreg w)) mem))
 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
-// result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem)
+// result: (MOVHstoreidx ptr0 idx0 (REV16W <typ.UInt16> w) mem)
 for {
 if auxIntToInt32(v.AuxInt) != 1 {
 break
@@ -10158,7 +10159,7 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 continue
 }
 v.reset(OpARM64MOVHstoreidx)
-v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
+v0 := b.NewValue0(v.Pos, OpARM64REV16W, typ.UInt16)
 v0.AddArg(w)
 v.AddArg4(ptr0, idx0, v0, mem)
 return true
@@ -10167,7 +10168,7 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 }
 // match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (UBFX [armBFAuxInt(8, 24)] w) mem))
 // cond: x.Uses == 1 && clobber(x)
-// result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem)
+// result: (MOVHstore [i-1] {s} ptr (REV16W <typ.UInt16> w) mem)
 for {
 i := auxIntToInt32(v.AuxInt)
 s := auxToSym(v.Aux)
@@ -10188,14 +10189,14 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 v.reset(OpARM64MOVHstore)
 v.AuxInt = int32ToAuxInt(i - 1)
 v.Aux = symToAux(s)
-v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
+v0 := b.NewValue0(x.Pos, OpARM64REV16W, typ.UInt16)
 v0.AddArg(w)
 v.AddArg3(ptr, v0, mem)
 return true
 }
 // match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (UBFX [armBFAuxInt(8, 24)] w) mem))
 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
-// result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem)
+// result: (MOVHstoreidx ptr0 idx0 (REV16W <typ.UInt16> w) mem)
 for {
 if auxIntToInt32(v.AuxInt) != 1 {
 break
@@ -10223,7 +10224,7 @@ func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
 continue
 }
 v.reset(OpARM64MOVHstoreidx)
-v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
+v0 := b.NewValue0(v.Pos, OpARM64REV16W, typ.UInt16)
 v0.AddArg(w)
 v.AddArg4(ptr0, idx0, v0, mem)
 return true
@@ -10238,6 +10239,7 @@ func rewriteValueARM64_OpARM64MOVBstoreidx(v *Value) bool {
 v_1 := v.Args[1]
 v_0 := v.Args[0]
 b := v.Block
+typ := &b.Func.Config.Types
 // match: (MOVBstoreidx ptr (MOVDconst [c]) val mem)
 // cond: is32Bit(c)
 // result: (MOVBstore [int32(c)] ptr val mem)
@@ -10400,7 +10402,7 @@ func rewriteValueARM64_OpARM64MOVBstoreidx(v *Value) bool {
 }
 // match: (MOVBstoreidx ptr (ADDconst [3] idx) w x0:(MOVBstoreidx ptr (ADDconst [2] idx) (UBFX [armBFAuxInt(8, 24)] w) x1:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [armBFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr idx (UBFX [armBFAuxInt(24, 8)] w) mem))))
 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)
-// result: (MOVWstoreidx ptr idx (REVW <w.Type> w) mem)
+// result: (MOVWstoreidx ptr idx (REVW <typ.UInt32> w) mem)
 for {
 ptr := v_0
 if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 3 {
@@ -10453,7 +10455,7 @@ func rewriteValueARM64_OpARM64MOVBstoreidx(v *Value) bool {
 break
 }
 v.reset(OpARM64MOVWstoreidx)
-v0 := b.NewValue0(v.Pos, OpARM64REVW, w.Type)
+v0 := b.NewValue0(v.Pos, OpARM64REVW, typ.UInt32)
 v0.AddArg(w)
 v.AddArg4(ptr, idx, v0, mem)
 return true
@@ -10519,7 +10521,7 @@ func rewriteValueARM64_OpARM64MOVBstoreidx(v *Value) bool {
 }
 // match: (MOVBstoreidx ptr (ADDconst [1] idx) w x:(MOVBstoreidx ptr idx (UBFX [armBFAuxInt(8, 8)] w) mem))
 // cond: x.Uses == 1 && clobber(x)
-// result: (MOVHstoreidx ptr idx (REV16W <w.Type> w) mem)
+// result: (MOVHstoreidx ptr idx (REV16W <typ.UInt16> w) mem)
 for {
 ptr := v_0
 if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 1 {
@@ -10540,7 +10542,7 @@ func rewriteValueARM64_OpARM64MOVBstoreidx(v *Value) bool {
 break
 }
 v.reset(OpARM64MOVHstoreidx)
-v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
+v0 := b.NewValue0(v.Pos, OpARM64REV16W, typ.UInt16)
 v0.AddArg(w)
 v.AddArg4(ptr, idx, v0, mem)
 return true
test/fixedbugs/issue59367.go (new file, 80 lines):

@@ -0,0 +1,80 @@
+// run
+
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+func main() {
+	var b [8]byte
+	one := uint8(1)
+	f16(&one, b[:2])
+	if b[1] != 1 {
+		println("2-byte value lost")
+	}
+	f32(&one, b[:4])
+	if b[3] != 1 {
+		println("4-byte value lost")
+	}
+	f64(&one, b[:8])
+	if b[7] != 1 {
+		println("8-byte value lost")
+	}
+}
+
+//go:noinline
+func f16(p *uint8, b []byte) {
+	_ = b[1]            // bounds check
+	x := *p             // load a byte
+	y := uint16(x)      // zero extend to 16 bits
+	b[0] = byte(y >> 8) // compute ROLW
+	b[1] = byte(y)
+	nop()               // spill/restore ROLW
+	b[0] = byte(y >> 8) // use ROLW
+	b[1] = byte(y)
+}
+
+//go:noinline
+func f32(p *uint8, b []byte) {
+	_ = b[3]             // bounds check
+	x := *p              // load a byte
+	y := uint32(x)       // zero extend to 32 bits
+	b[0] = byte(y >> 24) // compute ROLL
+	b[1] = byte(y >> 16)
+	b[2] = byte(y >> 8)
+	b[3] = byte(y)
+	nop()                // spill/restore ROLL
+	b[0] = byte(y >> 24) // use ROLL
+	b[1] = byte(y >> 16)
+	b[2] = byte(y >> 8)
+	b[3] = byte(y)
+}
+
+//go:noinline
+func f64(p *uint8, b []byte) {
+	_ = b[7]             // bounds check
+	x := *p              // load a byte
+	y := uint64(x)       // zero extend to 64 bits
+	b[0] = byte(y >> 56) // compute ROLQ
+	b[1] = byte(y >> 48)
+	b[2] = byte(y >> 40)
+	b[3] = byte(y >> 32)
+	b[4] = byte(y >> 24)
+	b[5] = byte(y >> 16)
+	b[6] = byte(y >> 8)
+	b[7] = byte(y)
+	nop()                // spill/restore ROLQ
+	b[0] = byte(y >> 56) // use ROLQ
+	b[1] = byte(y >> 48)
+	b[2] = byte(y >> 40)
+	b[3] = byte(y >> 32)
+	b[4] = byte(y >> 24)
+	b[5] = byte(y >> 16)
+	b[6] = byte(y >> 8)
+	b[7] = byte(y)
+}
+
+//go:noinline
+func nop() {
+}