
[InstCombine] fold min/max intrinsics with not ops

This is a partial translation of the existing select-based
folds. We need to recreate several different transforms to
avoid regressions as noted in D98152.

https://alive2.llvm.org/ce/z/teuZ_J
Author: Sanjay Patel  2021-03-09 08:14:30 -05:00
parent d394018ed5
commit ba30a3aa71
2 changed files with 25 additions and 10 deletions
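As a quick sanity check of the identity behind this fold (the Alive2 link above has the formal proof), here is a small standalone C++ program. It is not part of the commit and assumes only the standard library; it brute-forces max(~x, ~y) == ~min(x, y) over all 8-bit values for both the signed and unsigned flavors.

// Standalone illustration only (not from this commit): exhaustively verify
// smax(~x, ~y) == ~smin(x, y) and umax(~x, ~y) == ~umin(x, y) for i8.
#include <algorithm>
#include <cassert>
#include <cstdint>

int main() {
  for (int x = -128; x <= 127; ++x) {
    for (int y = -128; y <= 127; ++y) {
      auto X = static_cast<std::int8_t>(x), Y = static_cast<std::int8_t>(y);
      // Signed flavor: smax(~x, ~y) == ~smin(x, y).
      assert(std::max<std::int8_t>(~X, ~Y) ==
             static_cast<std::int8_t>(~std::min(X, Y)));
      // Unsigned flavor: umax(~x, ~y) == ~umin(x, y).
      auto UX = static_cast<std::uint8_t>(x), UY = static_cast<std::uint8_t>(y);
      assert(std::max<std::uint8_t>(~UX, ~UY) ==
             static_cast<std::uint8_t>(~std::min(UX, UY)));
    }
  }
  return 0;
}

The InstCombine change below performs this same rewrite on the llvm.smax/smin/umax/umin intrinsics.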


@@ -872,6 +872,7 @@ Instruction *InstCombinerImpl::visitCallInst(CallInst &CI) {
      Value *NarrowMaxMin = Builder.CreateBinaryIntrinsic(IID, X, Y);
      return CastInst::Create(Instruction::SExt, NarrowMaxMin, II->getType());
    }

    Constant *C;
    if (match(I0, m_SExt(m_Value(X))) && match(I1, m_Constant(C)) &&
        I0->hasOneUse()) {
@@ -881,6 +882,14 @@ Instruction *InstCombinerImpl::visitCallInst(CallInst &CI) {
        return CastInst::Create(Instruction::SExt, NarrowMaxMin, II->getType());
      }
    }

    if (match(I0, m_Not(m_Value(X))) && match(I1, m_Not(m_Value(Y))) &&
        (I0->hasOneUse() || I1->hasOneUse())) {
      Value *InvMaxMin =
          Builder.CreateBinaryIntrinsic(getInverseMinMaxIntrinsic(IID), X, Y);
      return BinaryOperator::CreateNot(InvMaxMin);
    }

    break;
  }
  case Intrinsic::bswap: {
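The new fold relies on getInverseMinMaxIntrinsic to pick the dual intrinsic; that helper's body is not shown in this diff. Conceptually it is just the smax<->smin and umax<->umin pairing, along the lines of the following sketch (an illustration with a hypothetical name, not the actual LLVM implementation):

// Sketch only (assumption): the real getInverseMinMaxIntrinsic lives elsewhere
// in LLVM and may differ in detail. The point is the smax<->smin and
// umax<->umin pairing that lets max(~x, ~y) become ~min(x, y).
#include "llvm/IR/Intrinsics.h"
#include "llvm/Support/ErrorHandling.h"
using namespace llvm;

static Intrinsic::ID inverseMinMaxSketch(Intrinsic::ID IID) {
  switch (IID) {
  case Intrinsic::smax: return Intrinsic::smin;
  case Intrinsic::smin: return Intrinsic::smax;
  case Intrinsic::umax: return Intrinsic::umin;
  case Intrinsic::umin: return Intrinsic::umax;
  default: llvm_unreachable("expected a min/max intrinsic");
  }
}

The (I0->hasOneUse() || I1->hasOneUse()) guard keeps the rewrite from increasing the instruction count: at least one of the original not ops goes dead, which pays for the new not wrapped around the inverted intrinsic.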


@@ -380,9 +380,8 @@ define i8 @umin_zext_constanti_uses(i5 %x) {
define i8 @smax_of_nots(i8 %x, i8 %y) {
; CHECK-LABEL: @smax_of_nots(
; CHECK-NEXT: [[NOTX:%.*]] = xor i8 [[X:%.*]], -1
; CHECK-NEXT: [[NOTY:%.*]] = xor i8 [[Y:%.*]], -1
; CHECK-NEXT: [[M:%.*]] = call i8 @llvm.smax.i8(i8 [[NOTX]], i8 [[NOTY]])
; CHECK-NEXT: [[TMP1:%.*]] = call i8 @llvm.smin.i8(i8 [[X:%.*]], i8 [[Y:%.*]])
; CHECK-NEXT: [[M:%.*]] = xor i8 [[TMP1]], -1
; CHECK-NEXT: ret i8 [[M]]
;
%notx = xor i8 %x, -1
@@ -391,11 +390,12 @@ define i8 @smax_of_nots(i8 %x, i8 %y) {
ret i8 %m
}
; Vectors are ok (including undef lanes of not ops)
define <3 x i8> @smin_of_nots(<3 x i8> %x, <3 x i8> %y) {
; CHECK-LABEL: @smin_of_nots(
; CHECK-NEXT: [[NOTX:%.*]] = xor <3 x i8> [[X:%.*]], <i8 -1, i8 undef, i8 -1>
; CHECK-NEXT: [[NOTY:%.*]] = xor <3 x i8> [[Y:%.*]], <i8 -1, i8 -1, i8 undef>
; CHECK-NEXT: [[M:%.*]] = call <3 x i8> @llvm.smin.v3i8(<3 x i8> [[NOTX]], <3 x i8> [[NOTY]])
; CHECK-NEXT: [[TMP1:%.*]] = call <3 x i8> @llvm.smax.v3i8(<3 x i8> [[X:%.*]], <3 x i8> [[Y:%.*]])
; CHECK-NEXT: [[M:%.*]] = xor <3 x i8> [[TMP1]], <i8 -1, i8 -1, i8 -1>
; CHECK-NEXT: ret <3 x i8> [[M]]
;
%notx = xor <3 x i8> %x, <i8 -1, i8 undef, i8 -1>
@@ -404,12 +404,14 @@ define <3 x i8> @smin_of_nots(<3 x i8> %x, <3 x i8> %y) {
ret <3 x i8> %m
}
; An extra use is ok.
define i8 @umax_of_nots(i8 %x, i8 %y) {
; CHECK-LABEL: @umax_of_nots(
; CHECK-NEXT: [[NOTX:%.*]] = xor i8 [[X:%.*]], -1
; CHECK-NEXT: call void @use(i8 [[NOTX]])
; CHECK-NEXT: [[NOTY:%.*]] = xor i8 [[Y:%.*]], -1
; CHECK-NEXT: [[M:%.*]] = call i8 @llvm.umax.i8(i8 [[NOTX]], i8 [[NOTY]])
; CHECK-NEXT: [[TMP1:%.*]] = call i8 @llvm.umin.i8(i8 [[X]], i8 [[Y:%.*]])
; CHECK-NEXT: [[M:%.*]] = xor i8 [[TMP1]], -1
; CHECK-NEXT: ret i8 [[M]]
;
%notx = xor i8 %x, -1
@@ -419,12 +421,14 @@ define i8 @umax_of_nots(i8 %x, i8 %y) {
ret i8 %m
}
; An extra use is ok.
define i8 @umin_of_nots(i8 %x, i8 %y) {
; CHECK-LABEL: @umin_of_nots(
; CHECK-NEXT: [[NOTX:%.*]] = xor i8 [[X:%.*]], -1
; CHECK-NEXT: [[NOTY:%.*]] = xor i8 [[Y:%.*]], -1
; CHECK-NEXT: call void @use(i8 [[NOTY]])
; CHECK-NEXT: [[M:%.*]] = call i8 @llvm.umin.i8(i8 [[NOTX]], i8 [[NOTY]])
; CHECK-NEXT: [[TMP1:%.*]] = call i8 @llvm.umax.i8(i8 [[X:%.*]], i8 [[Y]])
; CHECK-NEXT: [[M:%.*]] = xor i8 [[TMP1]], -1
; CHECK-NEXT: ret i8 [[M]]
;
%notx = xor i8 %x, -1
@@ -434,6 +438,8 @@ define i8 @umin_of_nots(i8 %x, i8 %y) {
ret i8 %m
}
; Negative test - too many uses
define i8 @umin_of_nots_uses(i8 %x, i8 %y) {
; CHECK-LABEL: @umin_of_nots_uses(
; CHECK-NEXT: [[NOTX:%.*]] = xor i8 [[X:%.*]], -1