
[GlobalISel] Don't skip adding predicate matcher

This patch fixes a bug that skipped adding the predicate matcher
for a pattern in many cases. For example, if the predicate is a
load and its MemoryVT is non-null, the loop hits a "continue" and
never reaches the end of its body, which is where the predicate
matcher is added. This patch moves the matcher addition to the top
of the loop so that it is added regardless of the contextual checks
later in the loop.
An alternative fix would be to remove all "continue" statements
from the checks and let each iteration run to the end of the loop
body.
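
To illustrate the control flow, here is a minimal, self-contained
sketch; all names below are hypothetical and simplified, not the
emitter's actual API:

// Minimal sketch of the control-flow bug (hypothetical names).
#include <cstdio>
#include <vector>

struct Pred {
  bool IsAtomic = false;
  bool HasMemoryVT = false;
  bool HasGISelPredicateCode = false;
};

static void addMemorySizeCheck(const Pred &) { std::puts("mem-size check"); }
static void addCxxPredicateCheck(const Pred &) { std::puts("C++ predicate check"); }

// Before: every builtin check ends in "continue", so a predicate that
// takes one of those paths never reaches the C++-predicate handling.
static void importPredicatesBuggy(const std::vector<Pred> &Preds) {
  for (const Pred &P : Preds) {
    if (P.IsAtomic && P.HasMemoryVT) {
      addMemorySizeCheck(P);
      continue; // Bug: also skips the GISelPredicateCode handling below.
    }
    if (P.HasGISelPredicateCode)
      addCxxPredicateCheck(P); // Never reached for the case above.
  }
}

// After: the builtin checks live in a helper that returns instead of
// continuing, so control always comes back before the next iteration.
static bool addBuiltinChecks(const Pred &P) {
  if (P.IsAtomic && P.HasMemoryVT) {
    addMemorySizeCheck(P);
    return true;
  }
  return false; // No builtin check applied.
}

static void importPredicatesFixed(const std::vector<Pred> &Preds) {
  for (const Pred &P : Preds) {
    bool HasBuiltin = addBuiltinChecks(P);
    if (P.HasGISelPredicateCode) {
      addCxxPredicateCheck(P); // Now emitted alongside the builtin checks.
      continue;
    }
    if (!HasBuiltin)
      std::puts("error: predicate not handled");
  }
}

int main() {
  const std::vector<Pred> Preds = {{true, true, true}};
  importPredicatesBuggy(Preds); // prints only "mem-size check"
  importPredicatesFixed(Preds); // prints both checks
  return 0;
}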

Reviewed By: arsenm

Differential Revision: https://reviews.llvm.org/D83034
Author: madhur13490
Date:   2020-07-02 09:08:06 +00:00
Commit: 2779613cc9 (parent 1da65b5ff0)
2 changed files with 230 additions and 139 deletions


@@ -0,0 +1,74 @@
// RUN: llvm-tblgen -gen-global-isel -I %p/../../include -I %p/Common -optimize-match-table=false %s -o %T/context-non-optimized.cpp
// RUN: FileCheck %s --check-prefixes=CHECK_NOPT -input-file=%T/context-non-optimized.cpp
// RUN: llvm-tblgen -gen-global-isel -I %p/../../include -I %p/Common -optimize-match-table=true %s -o %T/context-optimized.cpp
// RUN: FileCheck %s --check-prefixes=CHECK_OPT -input-file=%T/context-optimized.cpp
include "llvm/Target/Target.td"
include "GlobalISelEmitterCommon.td"
def test_atomic_op_frag : PatFrag<(ops node:$ptr, node:$val),
(atomic_swap node:$ptr, node:$val)> {
let GISelPredicateCode = [{ return !MRI.use_nodbg_empty(MI.getOperand(0).getReg()); }];
let IsAtomic = 1;
let MemoryVT = i32;
}
def INSN : I<(outs GPR32:$dst), (ins GPR32Op:$src1, GPR32Op:$src2), []>;
def : Pat<(test_atomic_op_frag GPR32:$ptr, GPR32:$val),
(INSN GPR32:$ptr, GPR32:$val)>;
// CHECK_NOPT-LABEL: const int64_t *MyTargetInstructionSelector::getMatchTable() const {
// CHECK_NOPT-NEXT: constexpr static int64_t MatchTable0[] = {
// CHECK_NOPT-NEXT: GIM_Try, /*On fail goto*//*Label 0*/ 46, // Rule ID 0 //
// CHECK_NOPT-NEXT: GIM_CheckNumOperands, /*MI*/0, /*Expected*/3,
// CHECK_NOPT-NEXT: GIM_CheckOpcode, /*MI*/0, TargetOpcode::G_ATOMICRMW_XCHG,
// CHECK_NOPT-NEXT: GIM_CheckMemorySizeEqualTo, /*MI*/0, /*MMO*/0, /*Size*/4,
// CHECK_NOPT-NEXT: // MIs[0] dst
// CHECK_NOPT-NEXT: GIM_CheckType, /*MI*/0, /*Op*/0, /*Type*/GILLT_s32,
// CHECK_NOPT-NEXT: GIM_CheckRegBankForClass, /*MI*/0, /*Op*/0, /*RC*/MyTarget::GPR32RegClassID,
// CHECK_NOPT-NEXT: // MIs[0] ptr
// CHECK_NOPT-NEXT: GIM_CheckPointerToAny, /*MI*/0, /*Op*/1, /*SizeInBits*/32,
// CHECK_NOPT-NEXT: GIM_CheckRegBankForClass, /*MI*/0, /*Op*/1, /*RC*/MyTarget::GPR32RegClassID,
// CHECK_NOPT-NEXT: // MIs[0] val
// CHECK_NOPT-NEXT: GIM_CheckType, /*MI*/0, /*Op*/2, /*Type*/GILLT_s32,
// CHECK_NOPT-NEXT: GIM_CheckRegBankForClass, /*MI*/0, /*Op*/2, /*RC*/MyTarget::GPR32RegClassID,
// CHECK_NOPT-NEXT: GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIPFP_MI_Predicate_test_atomic_op_frag,
// CHECK_NOPT-NEXT: // (atomic_swap:{ *:[i32] } GPR32:{ *:[i32] }:$ptr, GPR32:{ *:[i32] }:$val)<<P:Predicate_test_atomic_op_frag>> => (INSN:{ *:[i32] } GPR32:{ *:[i32] }:$ptr, GPR32:{ *:[i32] }:$val)
// CHECK_NOPT-NEXT: GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/MyTarget::INSN,
// CHECK_NOPT-NEXT: GIR_ConstrainSelectedInstOperands, /*InsnID*/0,
// CHECK_NOPT-NEXT: // GIR_Coverage, 0,
// CHECK_NOPT-NEXT: GIR_Done,
// CHECK_NOPT-NEXT: // Label 0: @46
// CHECK_NOPT-NEXT: GIM_Reject,
// CHECK_NOPT-NEXT: };
// CHECK_NOPT-NEXT: return MatchTable0;
// CHECK_NOPT-NEXT: }
//
//
// CHECK_OPT-LABEL: const int64_t *MyTargetInstructionSelector::getMatchTable() const {
// CHECK_OPT-NEXT: constexpr static int64_t MatchTable0[] = {
// CHECK_OPT-NEXT: GIM_Try, /*On fail goto*//*Label 0*/ 43, // Rule ID 0 //
// CHECK_OPT-NEXT: GIM_CheckOpcode, /*MI*/0, TargetOpcode::G_ATOMICRMW_XCHG,
// CHECK_OPT-NEXT: GIM_CheckType, /*MI*/0, /*Op*/0, /*Type*/GILLT_s32,
// CHECK_OPT-NEXT: GIM_CheckType, /*MI*/0, /*Op*/2, /*Type*/GILLT_s32,
// CHECK_OPT-NEXT: GIM_CheckMemorySizeEqualTo, /*MI*/0, /*MMO*/0, /*Size*/4,
// CHECK_OPT-NEXT: GIM_CheckRegBankForClass, /*MI*/0, /*Op*/0, /*RC*/MyTarget::GPR32RegClassID,
// CHECK_OPT-NEXT: // MIs[0] ptr
// CHECK_OPT-NEXT: GIM_CheckPointerToAny, /*MI*/0, /*Op*/1, /*SizeInBits*/32,
// CHECK_OPT-NEXT: GIM_CheckRegBankForClass, /*MI*/0, /*Op*/1, /*RC*/MyTarget::GPR32RegClassID,
// CHECK_OPT-NEXT: GIM_CheckRegBankForClass, /*MI*/0, /*Op*/2, /*RC*/MyTarget::GPR32RegClassID,
// CHECK_OPT-NEXT: GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIPFP_MI_Predicate_test_atomic_op_frag,
// CHECK_OPT-NEXT: // (atomic_swap:{ *:[i32] } GPR32:{ *:[i32] }:$ptr, GPR32:{ *:[i32] }:$val)<<P:Predicate_test_atomic_op_frag>> => (INSN:{ *:[i32] } GPR32:{ *:[i32] }:$ptr, GPR32:{ *:[i32] }:$val)
// CHECK_OPT-NEXT: GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/MyTarget::INSN,
// CHECK_OPT-NEXT: GIR_ConstrainSelectedInstOperands, /*InsnID*/0,
// CHECK_OPT-NEXT: // GIR_Coverage, 0,
// CHECK_OPT-NEXT: GIR_Done,
// CHECK_OPT-NEXT: // Label 0: @43
// CHECK_OPT-NEXT: GIM_Reject,
// CHECK_OPT-NEXT: };
// CHECK_OPT-NEXT: return MatchTable0;
// CHECK_OPT-NEXT: }
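
These CHECK lines pin down the fix: the GIM_CheckCxxInsnPredicate entry
for GIPFP_MI_Predicate_test_atomic_op_frag now appears in both the
non-optimized and the optimized match tables, alongside the builtin
checks implied by IsAtomic and MemoryVT (the G_ATOMICRMW_XCHG opcode
check and GIM_CheckMemorySizeEqualTo). Before this patch, the MemoryVT
handling ended in a "continue", so the GISelPredicateCode matcher was
silently dropped for exactly this kind of PatFrag.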

llvm/utils/TableGen/GlobalISelEmitter.cpp

@@ -3504,6 +3504,12 @@ private:
Optional<const CodeGenRegisterClass *>
inferRegClassFromPattern(TreePatternNode *N);
+  // Add builtin predicates.
+  Expected<InstructionMatcher &>
+  addBuiltinPredicates(const Record *SrcGIEquivOrNull,
+                       const TreePredicateFn &Predicate,
+                       InstructionMatcher &InsnMatcher, bool &HasAddedMatcher);
public:
/// Takes a sequence of \p Rules and group them based on the predicates
/// they share. \p MatcherStorage is used as a memory container
@@ -3611,6 +3617,147 @@ GlobalISelEmitter::importRulePredicates(RuleMatcher &M,
return Error::success();
}
+Expected<InstructionMatcher &> GlobalISelEmitter::addBuiltinPredicates(
+    const Record *SrcGIEquivOrNull, const TreePredicateFn &Predicate,
+    InstructionMatcher &InsnMatcher, bool &HasAddedMatcher) {
+  if (Predicate.isLoad() || Predicate.isStore() || Predicate.isAtomic()) {
+    if (const ListInit *AddrSpaces = Predicate.getAddressSpaces()) {
+      SmallVector<unsigned, 4> ParsedAddrSpaces;
+      for (Init *Val : AddrSpaces->getValues()) {
+        IntInit *IntVal = dyn_cast<IntInit>(Val);
+        if (!IntVal)
+          return failedImport("Address space is not an integer");
+        ParsedAddrSpaces.push_back(IntVal->getValue());
+      }
+      if (!ParsedAddrSpaces.empty()) {
+        InsnMatcher.addPredicate<MemoryAddressSpacePredicateMatcher>(
+            0, ParsedAddrSpaces);
+      }
+    }
+    int64_t MinAlign = Predicate.getMinAlignment();
+    if (MinAlign > 0)
+      InsnMatcher.addPredicate<MemoryAlignmentPredicateMatcher>(0, MinAlign);
+  }
+  // G_LOAD is used for both non-extending and any-extending loads.
+  if (Predicate.isLoad() && Predicate.isNonExtLoad()) {
+    InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
+        0, MemoryVsLLTSizePredicateMatcher::EqualTo, 0);
+    return InsnMatcher;
+  }
+  if (Predicate.isLoad() && Predicate.isAnyExtLoad()) {
+    InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
+        0, MemoryVsLLTSizePredicateMatcher::LessThan, 0);
+    return InsnMatcher;
+  }
+  if (Predicate.isStore()) {
+    if (Predicate.isTruncStore()) {
+      // FIXME: If MemoryVT is set, we end up with 2 checks for the MMO size.
+      InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
+          0, MemoryVsLLTSizePredicateMatcher::LessThan, 0);
+      return InsnMatcher;
+    }
+    if (Predicate.isNonTruncStore()) {
+      // We need to check that the sizes match here; otherwise we could
+      // incorrectly match truncating stores with non-truncating ones.
+      InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
+          0, MemoryVsLLTSizePredicateMatcher::EqualTo, 0);
+    }
+  }
+  // No check required. We already did it by swapping the opcode.
+  if (!SrcGIEquivOrNull->isValueUnset("IfSignExtend") &&
+      Predicate.isSignExtLoad())
+    return InsnMatcher;
+  // No check required. We already did it by swapping the opcode.
+  if (!SrcGIEquivOrNull->isValueUnset("IfZeroExtend") &&
+      Predicate.isZeroExtLoad())
+    return InsnMatcher;
+  // No check required. G_STORE by itself is a non-extending store.
+  if (Predicate.isNonTruncStore())
+    return InsnMatcher;
+  if (Predicate.isLoad() || Predicate.isStore() || Predicate.isAtomic()) {
+    if (Predicate.getMemoryVT() != nullptr) {
+      Optional<LLTCodeGen> MemTyOrNone =
+          MVTToLLT(getValueType(Predicate.getMemoryVT()));
+      if (!MemTyOrNone)
+        return failedImport("MemVT could not be converted to LLT");
+      // MMOs work in bytes, so we must take care that unusual types like i1
+      // don't round down.
+      unsigned MemSizeInBits =
+          llvm::alignTo(MemTyOrNone->get().getSizeInBits(), 8);
+      InsnMatcher.addPredicate<MemorySizePredicateMatcher>(0,
+                                                           MemSizeInBits / 8);
+      return InsnMatcher;
+    }
+  }
+  if (Predicate.isLoad() || Predicate.isStore()) {
+    // No check required. A G_LOAD/G_STORE is an unindexed load.
+    if (Predicate.isUnindexed())
+      return InsnMatcher;
+  }
+  if (Predicate.isAtomic()) {
+    if (Predicate.isAtomicOrderingMonotonic()) {
+      InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>("Monotonic");
+      return InsnMatcher;
+    }
+    if (Predicate.isAtomicOrderingAcquire()) {
+      InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>("Acquire");
+      return InsnMatcher;
+    }
+    if (Predicate.isAtomicOrderingRelease()) {
+      InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>("Release");
+      return InsnMatcher;
+    }
+    if (Predicate.isAtomicOrderingAcquireRelease()) {
+      InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
+          "AcquireRelease");
+      return InsnMatcher;
+    }
+    if (Predicate.isAtomicOrderingSequentiallyConsistent()) {
+      InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
+          "SequentiallyConsistent");
+      return InsnMatcher;
+    }
+  }
+  if (Predicate.isAtomicOrderingAcquireOrStronger()) {
+    InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
+        "Acquire", AtomicOrderingMMOPredicateMatcher::AO_OrStronger);
+    return InsnMatcher;
+  }
+  if (Predicate.isAtomicOrderingWeakerThanAcquire()) {
+    InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
+        "Acquire", AtomicOrderingMMOPredicateMatcher::AO_WeakerThan);
+    return InsnMatcher;
+  }
+  if (Predicate.isAtomicOrderingReleaseOrStronger()) {
+    InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
+        "Release", AtomicOrderingMMOPredicateMatcher::AO_OrStronger);
+    return InsnMatcher;
+  }
+  if (Predicate.isAtomicOrderingWeakerThanRelease()) {
+    InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
+        "Release", AtomicOrderingMMOPredicateMatcher::AO_WeakerThan);
+    return InsnMatcher;
+  }
+  HasAddedMatcher = false;
+  return InsnMatcher;
+}
Expected<InstructionMatcher &> GlobalISelEmitter::createAndImportSelDAGMatcher(
RuleMatcher &Rule, InstructionMatcher &InsnMatcher,
const TreePatternNode *Src, unsigned &TempOpIdx) {
@@ -3652,6 +3799,7 @@ Expected<InstructionMatcher &> GlobalISelEmitter::createAndImportSelDAGMatcher(
for (const TreePredicateCall &Call : Src->getPredicateCalls()) {
const TreePredicateFn &Predicate = Call.Fn;
+    bool HasAddedBuiltinMatcher = true;
if (Predicate.isAlwaysTrue())
continue;
@@ -3660,150 +3808,19 @@ Expected<InstructionMatcher &> GlobalISelEmitter::createAndImportSelDAGMatcher(
continue;
}
-    // An address space check is needed in all contexts if there is one.
-    if (Predicate.isLoad() || Predicate.isStore() || Predicate.isAtomic()) {
-      if (const ListInit *AddrSpaces = Predicate.getAddressSpaces()) {
-        SmallVector<unsigned, 4> ParsedAddrSpaces;
-        for (Init *Val : AddrSpaces->getValues()) {
-          IntInit *IntVal = dyn_cast<IntInit>(Val);
-          if (!IntVal)
-            return failedImport("Address space is not an integer");
-          ParsedAddrSpaces.push_back(IntVal->getValue());
-        }
-        if (!ParsedAddrSpaces.empty()) {
-          InsnMatcher.addPredicate<MemoryAddressSpacePredicateMatcher>(
-              0, ParsedAddrSpaces);
-        }
-      }
-      int64_t MinAlign = Predicate.getMinAlignment();
-      if (MinAlign > 0)
-        InsnMatcher.addPredicate<MemoryAlignmentPredicateMatcher>(0, MinAlign);
-    }
-    // G_LOAD is used for both non-extending and any-extending loads.
-    if (Predicate.isLoad() && Predicate.isNonExtLoad()) {
-      InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
-          0, MemoryVsLLTSizePredicateMatcher::EqualTo, 0);
-      continue;
-    }
-    if (Predicate.isLoad() && Predicate.isAnyExtLoad()) {
-      InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
-          0, MemoryVsLLTSizePredicateMatcher::LessThan, 0);
-      continue;
-    }
-    if (Predicate.isStore()) {
-      if (Predicate.isTruncStore()) {
-        // FIXME: If MemoryVT is set, we end up with 2 checks for the MMO size.
-        InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
-            0, MemoryVsLLTSizePredicateMatcher::LessThan, 0);
-        continue;
-      }
-      if (Predicate.isNonTruncStore()) {
-        // We need to check the sizes match here otherwise we could incorrectly
-        // match truncating stores with non-truncating ones.
-        InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
-            0, MemoryVsLLTSizePredicateMatcher::EqualTo, 0);
-      }
-    }
-    // No check required. We already did it by swapping the opcode.
-    if (!SrcGIEquivOrNull->isValueUnset("IfSignExtend") &&
-        Predicate.isSignExtLoad())
-      continue;
-    // No check required. We already did it by swapping the opcode.
-    if (!SrcGIEquivOrNull->isValueUnset("IfZeroExtend") &&
-        Predicate.isZeroExtLoad())
-      continue;
-    // No check required. G_STORE by itself is a non-extending store.
-    if (Predicate.isNonTruncStore())
-      continue;
-    if (Predicate.isLoad() || Predicate.isStore() || Predicate.isAtomic()) {
-      if (Predicate.getMemoryVT() != nullptr) {
-        Optional<LLTCodeGen> MemTyOrNone =
-            MVTToLLT(getValueType(Predicate.getMemoryVT()));
-        if (!MemTyOrNone)
-          return failedImport("MemVT could not be converted to LLT");
-        // MMO's work in bytes so we must take care of unusual types like i1
-        // don't round down.
-        unsigned MemSizeInBits =
-            llvm::alignTo(MemTyOrNone->get().getSizeInBits(), 8);
-        InsnMatcher.addPredicate<MemorySizePredicateMatcher>(
-            0, MemSizeInBits / 8);
-        continue;
-      }
-    }
-    if (Predicate.isLoad() || Predicate.isStore()) {
-      // No check required. A G_LOAD/G_STORE is an unindexed load.
-      if (Predicate.isUnindexed())
-        continue;
-    }
-    if (Predicate.isAtomic()) {
-      if (Predicate.isAtomicOrderingMonotonic()) {
-        InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
-            "Monotonic");
-        continue;
-      }
-      if (Predicate.isAtomicOrderingAcquire()) {
-        InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>("Acquire");
-        continue;
-      }
-      if (Predicate.isAtomicOrderingRelease()) {
-        InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>("Release");
-        continue;
-      }
-      if (Predicate.isAtomicOrderingAcquireRelease()) {
-        InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
-            "AcquireRelease");
-        continue;
-      }
-      if (Predicate.isAtomicOrderingSequentiallyConsistent()) {
-        InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
-            "SequentiallyConsistent");
-        continue;
-      }
-      if (Predicate.isAtomicOrderingAcquireOrStronger()) {
-        InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
-            "Acquire", AtomicOrderingMMOPredicateMatcher::AO_OrStronger);
-        continue;
-      }
-      if (Predicate.isAtomicOrderingWeakerThanAcquire()) {
-        InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
-            "Acquire", AtomicOrderingMMOPredicateMatcher::AO_WeakerThan);
-        continue;
-      }
-      if (Predicate.isAtomicOrderingReleaseOrStronger()) {
-        InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
-            "Release", AtomicOrderingMMOPredicateMatcher::AO_OrStronger);
-        continue;
-      }
-      if (Predicate.isAtomicOrderingWeakerThanRelease()) {
-        InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
-            "Release", AtomicOrderingMMOPredicateMatcher::AO_WeakerThan);
-        continue;
-      }
-    }
+    auto InsnMatcherOrError = addBuiltinPredicates(
+        SrcGIEquivOrNull, Predicate, InsnMatcher, HasAddedBuiltinMatcher);
+    if (auto Error = InsnMatcherOrError.takeError())
+      return std::move(Error);
if (Predicate.hasGISelPredicateCode()) {
InsnMatcher.addPredicate<GenericInstructionPredicateMatcher>(Predicate);
continue;
}
return failedImport("Src pattern child has predicate (" +
explainPredicates(Src) + ")");
if (!HasAddedBuiltinMatcher) {
return failedImport("Src pattern child has predicate (" +
explainPredicates(Src) + ")");
}
}
bool IsAtomic = false;