[release-branch.go1.19] cmd/compile: on PPC64, fix sign/zero extension when masking
This backport required manual cleanup, as go1.20 combined the ANDCCconst and ANDconst opcodes into one. Similarly, CL 456736 introduced a subtle bug by using (Select1 (ANDCCconst ...)). This usually worked because the same rule quietly changes the type of the newly created ANDCCconst to a tuple. This change exposed the bug, so fix it too.

(ANDconst [y] (MOV.*reg x)) should only be merged when zero extending. Otherwise, sign bits are lost on negative values.

(ANDconst [0xFF] (MOVBreg x)) should be simplified to a zero extension of x. Likewise for the MOVHreg variant.

Fixes #61319

Change-Id: I04e4fd7dc6a826e870681f37506620d48393698b
Reviewed-on: https://go-review.googlesource.com/c/go/+/508775
TryBot-Result: Gopher Robot <gobot@golang.org>
Run-TryBot: Paul Murphy <murp@ibm.com>
Reviewed-by: Bryan Mills <bcmills@google.com>
Reviewed-by: Cherry Mui <cherryyz@google.com>
Reviewed-on: https://go-review.googlesource.com/c/go/+/509018
Auto-Submit: Heschi Kreinick <heschi@google.com>
TryBot-Bypass: Heschi Kreinick <heschi@google.com>
parent e58941fc25, commit 6ce543d137
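To make the commit message concrete, here is a minimal, self-contained Go sketch of the semantics being fixed (illustrative only; the helper name is made up and does not appear in the commit). A sign-extended negative byte has all upper bits set, so a mask wider than 0xFF must observe those bits; the buggy rewrite folded the mask into the byte load and silently truncated it to the low 8 bits.

package main

import "fmt"

// maskSignExtended is a hypothetical stand-in for user code affected by the
// bug: the int8 is sign-extended to 64 bits before the mask is applied.
func maskSignExtended(a int8) uint64 {
	return uint64(a) & 0x3F7 // bits 8 and 9 of the mask must survive
}

func main() {
	a := int8(-1)
	// Correct result: sign extension produces all-ones, so the result is 0x3f7.
	fmt.Printf("%#x\n", maskSignExtended(a))
	// The old rewrite (ANDconst [c] (MOVBreg x)) => (ANDconst [c&0xFF] x)
	// would effectively compute this instead, losing the high mask bits:
	fmt.Printf("%#x\n", uint64(uint8(a))&(0x3F7&0xFF)) // 0xf7
}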
@@ -578,9 +578,9 @@
 ((EQ|NE|LT|LE|GT|GE) (CMPconst [0] z:(XOR x y)) yes no) && z.Uses == 1 => ((EQ|NE|LT|LE|GT|GE) (XORCC x y) yes no)
 
 // Only lower after bool is lowered. It should always lower. This helps ensure the folding below happens reliably.
-(CondSelect x y bool) && flagArg(bool) == nil => (ISEL [6] x y (Select1 <types.TypeFlags> (ANDCCconst [1] bool)))
+(CondSelect x y bool) && flagArg(bool) == nil => (ISEL [6] x y (ANDCCconst [1] bool))
 // Fold any CR -> GPR -> CR transfers when applying the above rule.
-(ISEL [6] x y (Select1 (ANDCCconst [1] (ISELB [c] one cmp)))) => (ISEL [c] x y cmp)
+(ISEL [6] x y (ANDCCconst [1] (ISELB [c] one cmp))) => (ISEL [c] x y cmp)
 
 // Lowering loads
 (Load <t> ptr mem) && (is64BitInt(t) || isPtr(t)) => (MOVDload ptr mem)
@@ -750,16 +750,16 @@
 
 // small and of zero-extend => either zero-extend or small and
 (ANDconst [c] y:(MOVBZreg _)) && c&0xFF == 0xFF => y
-(ANDconst [0xFF] y:(MOVBreg _)) => y
+(ANDconst [0xFF] (MOVBreg x)) => (MOVBZreg x)
 (ANDconst [c] y:(MOVHZreg _)) && c&0xFFFF == 0xFFFF => y
-(ANDconst [0xFFFF] y:(MOVHreg _)) => y
+(ANDconst [0xFFFF] (MOVHreg x)) => (MOVHZreg x)
 
 (AND (MOVDconst [c]) y:(MOVWZreg _)) && c&0xFFFFFFFF == 0xFFFFFFFF => y
 (AND (MOVDconst [0xFFFFFFFF]) y:(MOVWreg x)) => (MOVWZreg x)
 // normal case
-(ANDconst [c] (MOV(B|BZ)reg x)) => (ANDconst [c&0xFF] x)
-(ANDconst [c] (MOV(H|HZ)reg x)) => (ANDconst [c&0xFFFF] x)
-(ANDconst [c] (MOV(W|WZ)reg x)) => (ANDconst [c&0xFFFFFFFF] x)
+(ANDconst [c] (MOVBZreg x)) => (ANDconst [c&0xFF] x)
+(ANDconst [c] (MOVHZreg x)) => (ANDconst [c&0xFFFF] x)
+(ANDconst [c] (MOVWZreg x)) => (ANDconst [c&0xFFFFFFFF] x)
 
 // Eliminate unnecessary sign/zero extend following right shift
 (MOV(B|H|W)Zreg (SRWconst [c] (MOVBZreg x))) => (SRWconst [c] (MOVBZreg x))
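An aside on the two corrected rules above: masking a sign-extended value with exactly its low byte or halfword (0xFF or 0xFFFF) is the same as zero-extending it, which is why those cases now rewrite to MOVBZreg/MOVHZreg rather than being folded away. A tiny self-contained check of that identity, assuming nothing beyond ordinary Go arithmetic:

package main

import "fmt"

func main() {
	// For every possible byte, masking the sign-extended form with 0xFF
	// recovers exactly the zero-extended form.
	for i := 0; i < 256; i++ {
		a := int8(i) // wraps for i >= 128, covering negative values
		if uint64(a)&0xFF != uint64(uint8(a)) {
			fmt.Println("mismatch at", i)
			return
		}
	}
	fmt.Println("uint64(int8(x)) & 0xFF == uint64(uint8(x)) for every byte x")
}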
@@ -1175,10 +1175,9 @@ func rewriteValuePPC64_OpCondSelect(v *Value) bool {
 	v_1 := v.Args[1]
 	v_0 := v.Args[0]
 	b := v.Block
-	typ := &b.Func.Config.Types
 	// match: (CondSelect x y bool)
 	// cond: flagArg(bool) == nil
-	// result: (ISEL [6] x y (Select1 <types.TypeFlags> (ANDCCconst [1] bool)))
+	// result: (ISEL [6] x y (ANDCCconst [1] bool))
 	for {
 		x := v_0
 		y := v_1
@@ -1188,11 +1187,9 @@ func rewriteValuePPC64_OpCondSelect(v *Value) bool {
 		}
 		v.reset(OpPPC64ISEL)
 		v.AuxInt = int32ToAuxInt(6)
-		v0 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
-		v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
-		v1.AuxInt = int64ToAuxInt(1)
-		v1.AddArg(bool)
-		v0.AddArg(v1)
+		v0 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.TypeFlags)
+		v0.AuxInt = int64ToAuxInt(1)
+		v0.AddArg(bool)
 		v.AddArg3(x, y, v0)
 		return true
 	}
@@ -4587,17 +4584,15 @@ func rewriteValuePPC64_OpPPC64ANDconst(v *Value) bool {
 		v.copyOf(y)
 		return true
 	}
-	// match: (ANDconst [0xFF] y:(MOVBreg _))
-	// result: y
+	// match: (ANDconst [0xFF] (MOVBreg x))
+	// result: (MOVBZreg x)
 	for {
-		if auxIntToInt64(v.AuxInt) != 0xFF {
+		if auxIntToInt64(v.AuxInt) != 0xFF || v_0.Op != OpPPC64MOVBreg {
 			break
 		}
-		y := v_0
-		if y.Op != OpPPC64MOVBreg {
-			break
-		}
-		v.copyOf(y)
+		x := v_0.Args[0]
+		v.reset(OpPPC64MOVBZreg)
+		v.AddArg(x)
 		return true
 	}
 	// match: (ANDconst [c] y:(MOVHZreg _))
@@ -4612,29 +4607,14 @@ func rewriteValuePPC64_OpPPC64ANDconst(v *Value) bool {
 		v.copyOf(y)
 		return true
 	}
-	// match: (ANDconst [0xFFFF] y:(MOVHreg _))
-	// result: y
+	// match: (ANDconst [0xFFFF] (MOVHreg x))
+	// result: (MOVHZreg x)
 	for {
-		if auxIntToInt64(v.AuxInt) != 0xFFFF {
-			break
-		}
-		y := v_0
-		if y.Op != OpPPC64MOVHreg {
-			break
-		}
-		v.copyOf(y)
-		return true
-	}
-	// match: (ANDconst [c] (MOVBreg x))
-	// result: (ANDconst [c&0xFF] x)
-	for {
-		c := auxIntToInt64(v.AuxInt)
-		if v_0.Op != OpPPC64MOVBreg {
+		if auxIntToInt64(v.AuxInt) != 0xFFFF || v_0.Op != OpPPC64MOVHreg {
 			break
 		}
 		x := v_0.Args[0]
-		v.reset(OpPPC64ANDconst)
-		v.AuxInt = int64ToAuxInt(c & 0xFF)
+		v.reset(OpPPC64MOVHZreg)
 		v.AddArg(x)
 		return true
 	}
@@ -4651,19 +4631,6 @@ func rewriteValuePPC64_OpPPC64ANDconst(v *Value) bool {
 		v.AddArg(x)
 		return true
 	}
-	// match: (ANDconst [c] (MOVHreg x))
-	// result: (ANDconst [c&0xFFFF] x)
-	for {
-		c := auxIntToInt64(v.AuxInt)
-		if v_0.Op != OpPPC64MOVHreg {
-			break
-		}
-		x := v_0.Args[0]
-		v.reset(OpPPC64ANDconst)
-		v.AuxInt = int64ToAuxInt(c & 0xFFFF)
-		v.AddArg(x)
-		return true
-	}
 	// match: (ANDconst [c] (MOVHZreg x))
 	// result: (ANDconst [c&0xFFFF] x)
 	for {
@@ -4677,19 +4644,6 @@ func rewriteValuePPC64_OpPPC64ANDconst(v *Value) bool {
 		v.AddArg(x)
 		return true
 	}
-	// match: (ANDconst [c] (MOVWreg x))
-	// result: (ANDconst [c&0xFFFFFFFF] x)
-	for {
-		c := auxIntToInt64(v.AuxInt)
-		if v_0.Op != OpPPC64MOVWreg {
-			break
-		}
-		x := v_0.Args[0]
-		v.reset(OpPPC64ANDconst)
-		v.AuxInt = int64ToAuxInt(c & 0xFFFFFFFF)
-		v.AddArg(x)
-		return true
-	}
 	// match: (ANDconst [c] (MOVWZreg x))
 	// result: (ANDconst [c&0xFFFFFFFF] x)
 	for {
@@ -5934,7 +5888,7 @@ func rewriteValuePPC64_OpPPC64ISEL(v *Value) bool {
 		v.AddArg(y)
 		return true
 	}
-	// match: (ISEL [6] x y (Select1 (ANDCCconst [1] (ISELB [c] one cmp))))
+	// match: (ISEL [6] x y (ANDCCconst [1] (ISELB [c] one cmp)))
 	// result: (ISEL [c] x y cmp)
 	for {
 		if auxIntToInt32(v.AuxInt) != 6 {
@@ -5942,19 +5896,15 @@ func rewriteValuePPC64_OpPPC64ISEL(v *Value) bool {
 		}
 		x := v_0
 		y := v_1
-		if v_2.Op != OpSelect1 {
+		if v_2.Op != OpPPC64ANDCCconst || auxIntToInt64(v_2.AuxInt) != 1 {
 			break
 		}
 		v_2_0 := v_2.Args[0]
-		if v_2_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_2_0.AuxInt) != 1 {
+		if v_2_0.Op != OpPPC64ISELB {
 			break
 		}
-		v_2_0_0 := v_2_0.Args[0]
-		if v_2_0_0.Op != OpPPC64ISELB {
-			break
-		}
-		c := auxIntToInt32(v_2_0_0.AuxInt)
-		cmp := v_2_0_0.Args[1]
+		c := auxIntToInt32(v_2_0.AuxInt)
+		cmp := v_2_0.Args[1]
 		v.reset(OpPPC64ISEL)
 		v.AuxInt = int32ToAuxInt(c)
 		v.AddArg3(x, y, cmp)
@@ -363,3 +363,27 @@ func issue48467(x, y uint64) uint64 {
 	d, borrow := bits.Sub64(x, y, 0)
 	return x - d&(-borrow)
 }
+
+// Verify sign-extended values are not zero-extended under a bit mask (#61297)
+func signextendAndMask8to64(a int8) (s, z uint64) {
+	// ppc64: "MOVB", "ANDCC\t[$]1015,"
+	// ppc64le: "MOVB", "ANDCC\t[$]1015,"
+	s = uint64(a) & 0x3F7
+	// ppc64: -"MOVB", "ANDCC\t[$]247,"
+	// ppc64le: -"MOVB", "ANDCC\t[$]247,"
+	z = uint64(uint8(a)) & 0x3F7
+	return
+
+}
+
+// Verify zero-extended values are not sign-extended under a bit mask (#61297)
+func zeroextendAndMask8to64(a int8, b int16) (x, y uint64) {
+	// ppc64: -"MOVB\t", -"ANDCC", "MOVBZ"
+	// ppc64le: -"MOVB\t", -"ANDCC", "MOVBZ"
+	x = uint64(a) & 0xFF
+	// ppc64: -"MOVH\t", -"ANDCC", "MOVHZ"
+	// ppc64le: -"MOVH\t", -"ANDCC", "MOVHZ"
+	y = uint64(b) & 0xFFFF
+	return
+
+}
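A note on the directives in the new codegen test, stated as my understanding of the harness rather than something the commit spells out: each quoted string in a // ppc64: or // ppc64le: comment is a regular expression that must match the assembly generated for the following source line, and a leading - means the pattern must not appear. A hypothetical standalone function in the same style (the function name is made up; the assertion mirrors the one added above for x = uint64(a) & 0xFF):

package codegen

// maskIsZeroExtend is an illustrative example, not part of the commit:
// masking a sign-extended byte with 0xFF should compile to a plain
// zero-extending load on ppc64, with no ANDCC emitted.
func maskIsZeroExtend(a int8) uint64 {
	// ppc64: -"ANDCC", "MOVBZ"
	return uint64(a) & 0xFF
}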