Mirror of https://github.com/RPCS3/llvm-mirror.git (synced 2024-11-22 02:33:06 +01:00)
[VP] make getFunctionalOpcode return an Optional

The operations of some VP intrinsics do not (or will not) map to regular instruction opcodes. Returning 'None' seems more intuitive here than 'Instruction::Call'.

Reviewed By: frasercrmck

Differential Revision: https://reviews.llvm.org/D102778
commit 0dc8431dd3
parent 5ae5cfdb6b
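As a caller-side illustration (not part of this patch): a minimal sketch of how code might consume the new Optional<unsigned> return value after this change. The helper name getOpcodeOrCall is hypothetical; it only assumes the post-patch signature shown in the diff below.

// Sketch only: assumes the post-patch signature
//   Optional<unsigned> VPIntrinsic::getFunctionalOpcode() const;
#include "llvm/ADT/Optional.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/IntrinsicInst.h"

using namespace llvm;

// Hypothetical helper: return the functional opcode if one exists,
// otherwise fall back to Instruction::Call as pre-patch callers did.
static unsigned getOpcodeOrCall(const VPIntrinsic &VPI) {
  if (Optional<unsigned> OC = VPI.getFunctionalOpcode())
    return *OC;               // Maps to a regular instruction opcode.
  return Instruction::Call;   // No regular opcode for this VP intrinsic.
}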
@@ -417,12 +417,12 @@ public:
   }
 
   // Equivalent non-predicated opcode
-  unsigned getFunctionalOpcode() const {
+  Optional<unsigned> getFunctionalOpcode() const {
     return GetFunctionalOpcodeForVP(getIntrinsicID());
   }
 
   // Equivalent non-predicated opcode
-  static unsigned GetFunctionalOpcodeForVP(Intrinsic::ID ID);
+  static Optional<unsigned> GetFunctionalOpcodeForVP(Intrinsic::ID ID);
 };
 
 /// This is the common base class for constrained floating point intrinsics.
@@ -217,7 +217,7 @@ CachingVPExpander::expandPredicationInBinaryOperator(IRBuilder<> &Builder,
           VPI.canIgnoreVectorLengthParam()) &&
          "Implicitly dropping %evl in non-speculatable operator!");
 
-  auto OC = static_cast<Instruction::BinaryOps>(VPI.getFunctionalOpcode());
+  auto OC = static_cast<Instruction::BinaryOps>(*VPI.getFunctionalOpcode());
   assert(Instruction::isBinaryOp(OC));
 
   Value *Op0 = VPI.getOperand(0);
@@ -316,9 +316,9 @@ Value *CachingVPExpander::expandPredication(VPIntrinsic &VPI) {
   IRBuilder<> Builder(&VPI);
 
   // Try lowering to a LLVM instruction first.
-  unsigned OC = VPI.getFunctionalOpcode();
+  auto OC = VPI.getFunctionalOpcode();
 
-  if (Instruction::isBinaryOp(OC))
+  if (OC && Instruction::isBinaryOp(*OC))
     return expandPredicationInBinaryOperator(Builder, VPI);
 
   return &VPI;
@@ -317,8 +317,8 @@ bool VPIntrinsic::IsVPIntrinsic(Intrinsic::ID ID) {
 }
 
 // Equivalent non-predicated opcode
-unsigned VPIntrinsic::GetFunctionalOpcodeForVP(Intrinsic::ID ID) {
-  unsigned FunctionalOC = Instruction::Call;
+Optional<unsigned> VPIntrinsic::GetFunctionalOpcodeForVP(Intrinsic::ID ID) {
+  Optional<unsigned> FunctionalOC;
   switch (ID) {
   default:
     break;
@@ -183,16 +183,17 @@ TEST_F(VPIntrinsicTest, OpcodeRoundTrip) {
   unsigned FullTripCounts = 0;
   for (unsigned OC : Opcodes) {
     Intrinsic::ID VPID = VPIntrinsic::GetForOpcode(OC);
-    // no equivalent VP intrinsic available
+    // No equivalent VP intrinsic available.
     if (VPID == Intrinsic::not_intrinsic)
       continue;
 
-    unsigned RoundTripOC = VPIntrinsic::GetFunctionalOpcodeForVP(VPID);
-    // no equivalent Opcode available
-    if (RoundTripOC == Instruction::Call)
+    Optional<unsigned> RoundTripOC =
+        VPIntrinsic::GetFunctionalOpcodeForVP(VPID);
+    // No equivalent Opcode available.
+    if (!RoundTripOC)
       continue;
 
-    ASSERT_EQ(RoundTripOC, OC);
+    ASSERT_EQ(*RoundTripOC, OC);
     ++FullTripCounts;
   }
   ASSERT_NE(FullTripCounts, 0u);
@@ -207,13 +208,13 @@ TEST_F(VPIntrinsicTest, IntrinsicIDRoundTrip) {
   unsigned FullTripCounts = 0;
   for (const auto &VPDecl : *M) {
     auto VPID = VPDecl.getIntrinsicID();
-    unsigned OC = VPIntrinsic::GetFunctionalOpcodeForVP(VPID);
+    Optional<unsigned> OC = VPIntrinsic::GetFunctionalOpcodeForVP(VPID);
 
     // no equivalent Opcode available
-    if (OC == Instruction::Call)
+    if (!OC)
       continue;
 
-    Intrinsic::ID RoundTripVPID = VPIntrinsic::GetForOpcode(OC);
+    Intrinsic::ID RoundTripVPID = VPIntrinsic::GetForOpcode(*OC);
 
     ASSERT_EQ(RoundTripVPID, VPID);
     ++FullTripCounts;