1
0
mirror of https://github.com/RPCS3/llvm-mirror.git synced 2024-11-22 02:33:06 +01:00

[VP] make getFunctionalOpcode return an Optional

The operations of some VP intrinsics do/will not map to regular
instruction opcodes.  Returning 'None' seems more intuitive here than
'Instruction::Call'.

Reviewed By: frasercrmck

Differential Revision: https://reviews.llvm.org/D102778
This commit is contained in:
Simon Moll 2021-05-19 17:08:20 +02:00
parent 5ae5cfdb6b
commit 0dc8431dd3
4 changed files with 16 additions and 15 deletions

View File

@ -417,12 +417,12 @@ public:
}
// Equivalent non-predicated opcode
unsigned getFunctionalOpcode() const {
Optional<unsigned> getFunctionalOpcode() const {
return GetFunctionalOpcodeForVP(getIntrinsicID());
}
// Equivalent non-predicated opcode
static unsigned GetFunctionalOpcodeForVP(Intrinsic::ID ID);
static Optional<unsigned> GetFunctionalOpcodeForVP(Intrinsic::ID ID);
};
/// This is the common base class for constrained floating point intrinsics.

View File

@ -217,7 +217,7 @@ CachingVPExpander::expandPredicationInBinaryOperator(IRBuilder<> &Builder,
VPI.canIgnoreVectorLengthParam()) &&
"Implicitly dropping %evl in non-speculatable operator!");
auto OC = static_cast<Instruction::BinaryOps>(VPI.getFunctionalOpcode());
auto OC = static_cast<Instruction::BinaryOps>(*VPI.getFunctionalOpcode());
assert(Instruction::isBinaryOp(OC));
Value *Op0 = VPI.getOperand(0);
@ -316,9 +316,9 @@ Value *CachingVPExpander::expandPredication(VPIntrinsic &VPI) {
IRBuilder<> Builder(&VPI);
// Try lowering to a LLVM instruction first.
unsigned OC = VPI.getFunctionalOpcode();
auto OC = VPI.getFunctionalOpcode();
if (Instruction::isBinaryOp(OC))
if (OC && Instruction::isBinaryOp(*OC))
return expandPredicationInBinaryOperator(Builder, VPI);
return &VPI;

View File

@ -317,8 +317,8 @@ bool VPIntrinsic::IsVPIntrinsic(Intrinsic::ID ID) {
}
// Equivalent non-predicated opcode
unsigned VPIntrinsic::GetFunctionalOpcodeForVP(Intrinsic::ID ID) {
unsigned FunctionalOC = Instruction::Call;
Optional<unsigned> VPIntrinsic::GetFunctionalOpcodeForVP(Intrinsic::ID ID) {
Optional<unsigned> FunctionalOC;
switch (ID) {
default:
break;

View File

@ -183,16 +183,17 @@ TEST_F(VPIntrinsicTest, OpcodeRoundTrip) {
unsigned FullTripCounts = 0;
for (unsigned OC : Opcodes) {
Intrinsic::ID VPID = VPIntrinsic::GetForOpcode(OC);
// no equivalent VP intrinsic available
// No equivalent VP intrinsic available.
if (VPID == Intrinsic::not_intrinsic)
continue;
unsigned RoundTripOC = VPIntrinsic::GetFunctionalOpcodeForVP(VPID);
// no equivalent Opcode available
if (RoundTripOC == Instruction::Call)
Optional<unsigned> RoundTripOC =
VPIntrinsic::GetFunctionalOpcodeForVP(VPID);
// No equivalent Opcode available.
if (!RoundTripOC)
continue;
ASSERT_EQ(RoundTripOC, OC);
ASSERT_EQ(*RoundTripOC, OC);
++FullTripCounts;
}
ASSERT_NE(FullTripCounts, 0u);
@ -207,13 +208,13 @@ TEST_F(VPIntrinsicTest, IntrinsicIDRoundTrip) {
unsigned FullTripCounts = 0;
for (const auto &VPDecl : *M) {
auto VPID = VPDecl.getIntrinsicID();
unsigned OC = VPIntrinsic::GetFunctionalOpcodeForVP(VPID);
Optional<unsigned> OC = VPIntrinsic::GetFunctionalOpcodeForVP(VPID);
// no equivalent Opcode available
if (OC == Instruction::Call)
if (!OC)
continue;
Intrinsic::ID RoundTripVPID = VPIntrinsic::GetForOpcode(OC);
Intrinsic::ID RoundTripVPID = VPIntrinsic::GetForOpcode(*OC);
ASSERT_EQ(RoundTripVPID, VPID);
++FullTripCounts;