
[InstCombine] try to fold 'add+sub' to 'not+add'

These are reassociated versions of the same pattern and
similar transforms as in rL338200 and rL338118.

The motivation is identical to those commits:
Patterns with add/sub combos can be improved using
'not' ops. This is better for analysis and may lead
to follow-on transforms because 'xor' and 'add' are
commutative/associative. It can also help codegen.

llvm-svn: 338221
Sanjay Patel 2018-07-29 18:13:16 +00:00
parent d4ae78117b
commit cfa7bdf02e
2 changed files with 16 additions and 8 deletions
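
Both rewrites are exact in two's-complement arithmetic, where ~A == -A - 1: (X + -1) - Y == X - 1 - Y == (-Y - 1) + X == ~Y + X, and Y - (X + 1) == Y - X - 1 == (-X - 1) + Y == ~X + Y. Because the identities hold modulo 2^N for any bit width, the folds need no nsw/nuw preconditions. A minimal standalone C++ sketch (not part of the patch) that exhaustively checks both identities for i8:

// Exhaustive i8 check of the two rewrites; illustrative only, not part of
// the patch or the LLVM tree.
#include <assert.h>
#include <stdint.h>

int main() {
  for (unsigned xi = 0; xi < 256; ++xi) {
    for (unsigned yi = 0; yi < 256; ++yi) {
      uint8_t x = (uint8_t)xi, y = (uint8_t)yi;
      // (X + -1) - Y --> ~Y + X   (0xFF is -1 as an i8)
      assert((uint8_t)((uint8_t)(x + 0xFF) - y) == (uint8_t)((uint8_t)~y + x));
      // Y - (X + 1) --> ~X + Y
      assert((uint8_t)(y - (uint8_t)(x + 1)) == (uint8_t)((uint8_t)~x + y));
    }
  }
  return 0;
}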


@@ -1614,6 +1614,14 @@ Instruction *InstCombiner::visitSub(BinaryOperator &I) {
   if (match(Op0, m_Not(m_Value(X))) && match(Op1, m_Not(m_Value(Y))))
     return BinaryOperator::CreateSub(Y, X);
 
+  // (X + -1) - Y --> ~Y + X
+  if (match(Op0, m_OneUse(m_Add(m_Value(X), m_AllOnes()))))
+    return BinaryOperator::CreateAdd(Builder.CreateNot(Op1), X);
+
+  // Y - (X + 1) --> ~X + Y
+  if (match(Op1, m_OneUse(m_Add(m_Value(X), m_One()))))
+    return BinaryOperator::CreateAdd(Builder.CreateNot(X), Op0);
+
   if (Constant *C = dyn_cast<Constant>(Op0)) {
     bool IsNegate = match(C, m_ZeroInt());
     Value *X;
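
The m_OneUse guards restrict the fold to the case where the matched 'add' becomes dead once the 'sub' is rewritten, so the transform never increases the instruction count; that is consistent with @sub_inc_extra_use and @sub_dec_extra_use appearing only as unchanged context in the test diff below. Returning a newly created (not yet inserted) BinaryOperator from visitSub tells InstCombine to replace the original 'sub' with it, while Builder.CreateNot inserts the new 'xor ..., -1' right before it. A minimal sketch of the same matching logic as a free-standing helper (the helper name is illustrative and does not exist in the patch):

// Illustrative helper mirroring the new matching logic; not part of the patch.
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/PatternMatch.h"

using namespace llvm;
using namespace llvm::PatternMatch;

// Given the operands of a 'sub', return an equivalent value built from
// 'not' + 'add', or nullptr if neither reassociated pattern matches.
static Value *foldSubOfReassociatedIncDec(Value *Op0, Value *Op1,
                                          IRBuilder<> &Builder) {
  Value *X;
  // (X + -1) - Y --> ~Y + X
  if (match(Op0, m_OneUse(m_Add(m_Value(X), m_AllOnes()))))
    return Builder.CreateAdd(Builder.CreateNot(Op1), X);
  // Y - (X + 1) --> ~X + Y
  if (match(Op1, m_OneUse(m_Add(m_Value(X), m_One()))))
    return Builder.CreateAdd(Builder.CreateNot(X), Op0);
  return nullptr;
}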


@@ -75,8 +75,8 @@ define <2 x i8> @dec_sub_vec(<2 x i8> %x, <2 x i8> %y) {
 
 define i8 @sub_inc(i8 %x, i8 %y) {
 ; CHECK-LABEL: @sub_inc(
-; CHECK-NEXT: [[S:%.*]] = add i8 [[X:%.*]], 1
-; CHECK-NEXT: [[R:%.*]] = sub i8 [[Y:%.*]], [[S]]
+; CHECK-NEXT: [[TMP1:%.*]] = xor i8 [[X:%.*]], -1
+; CHECK-NEXT: [[R:%.*]] = add i8 [[TMP1]], [[Y:%.*]]
 ; CHECK-NEXT: ret i8 [[R]]
 ;
   %s = add i8 %x, 1
@@ -99,8 +99,8 @@ define i8 @sub_inc_extra_use(i8 %x, i8 %y) {
 
 define <2 x i8> @sub_inc_vec(<2 x i8> %x, <2 x i8> %y) {
 ; CHECK-LABEL: @sub_inc_vec(
-; CHECK-NEXT: [[S:%.*]] = add <2 x i8> [[X:%.*]], <i8 undef, i8 1>
-; CHECK-NEXT: [[R:%.*]] = sub <2 x i8> [[Y:%.*]], [[S]]
+; CHECK-NEXT: [[TMP1:%.*]] = xor <2 x i8> [[X:%.*]], <i8 -1, i8 -1>
+; CHECK-NEXT: [[R:%.*]] = add <2 x i8> [[TMP1]], [[Y:%.*]]
 ; CHECK-NEXT: ret <2 x i8> [[R]]
 ;
   %s = add <2 x i8> %x, <i8 undef, i8 1>
@@ -110,8 +110,8 @@ define <2 x i8> @sub_inc_vec(<2 x i8> %x, <2 x i8> %y) {
 
 define i8 @sub_dec(i8 %x, i8 %y) {
 ; CHECK-LABEL: @sub_dec(
-; CHECK-NEXT: [[S:%.*]] = add i8 [[X:%.*]], -1
-; CHECK-NEXT: [[R:%.*]] = sub i8 [[S]], [[Y:%.*]]
+; CHECK-NEXT: [[TMP1:%.*]] = xor i8 [[Y:%.*]], -1
+; CHECK-NEXT: [[R:%.*]] = add i8 [[TMP1]], [[X:%.*]]
 ; CHECK-NEXT: ret i8 [[R]]
 ;
   %s = add i8 %x, -1
@@ -134,8 +134,8 @@ define i8 @sub_dec_extra_use(i8 %x, i8 %y) {
 
 define <2 x i8> @sub_dec_vec(<2 x i8> %x, <2 x i8> %y) {
 ; CHECK-LABEL: @sub_dec_vec(
-; CHECK-NEXT: [[S:%.*]] = add <2 x i8> [[X:%.*]], <i8 undef, i8 -1>
-; CHECK-NEXT: [[R:%.*]] = sub <2 x i8> [[S]], [[Y:%.*]]
+; CHECK-NEXT: [[TMP1:%.*]] = xor <2 x i8> [[Y:%.*]], <i8 -1, i8 -1>
+; CHECK-NEXT: [[R:%.*]] = add <2 x i8> [[TMP1]], [[X:%.*]]
 ; CHECK-NEXT: ret <2 x i8> [[R]]
 ;
   %s = add <2 x i8> %x, <i8 undef, i8 -1>