[SelectionDAG] Add getTokenFactor, which splits nodes with > 64k operands.
This functionality is required at multiple places which potentially create
large operand lists, like SelectionDAGBuilder or DAGCombiner.

Differential Revision: https://reviews.llvm.org/D56739

llvm-svn: 351552
commit 7e4de1d530
parent def6797df7
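For background (context, not part of the commit): the 64k limit in the title comes from SDNode keeping its operand count in a 16-bit field, so SDNode::getMaxNumOperands() tops out at 65535 and a single TokenFactor cannot hold more operands than that. Below is a minimal, self-contained sketch (not LLVM code; plain ints stand in for SDValue chains) of the folding arithmetic the new helper applies: fold the last Limit values into one placeholder, and repeat until the remainder fits in one node.

#include <cstddef>
#include <cstdio>
#include <vector>

int main() {
  const std::size_t Limit = 65535;    // stands in for SDNode::getMaxNumOperands()
  std::vector<int> Vals(200000, 0);   // stands in for 200k pending chain values

  int Folds = 0;
  while (Vals.size() > Limit) {
    std::size_t SliceIdx = Vals.size() - Limit;
    Vals.erase(Vals.begin() + SliceIdx, Vals.end()); // fold the last Limit values...
    Vals.push_back(1);                               // ...into one intermediate "TokenFactor"
    ++Folds;
  }
  // 200000 -> 134466 -> 68932 -> 3398: three intermediate nodes, then a final
  // node with 3398 operands.
  std::printf("%d intermediate folds, %zu operands left\n", Folds, Vals.size());
}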
include/llvm/CodeGen/SelectionDAG.h
@@ -1154,6 +1154,11 @@ public:
                              SDValue Op3, SDValue Op4, SDValue Op5);
   SDNode *UpdateNodeOperands(SDNode *N, ArrayRef<SDValue> Ops);
 
+  /// Creates a new TokenFactor containing \p Vals. If \p Vals contains 64k
+  /// values or more, move values into new TokenFactors in 64k-1 blocks, until
+  /// the final TokenFactor has less than 64k operands.
+  SDValue getTokenFactor(const SDLoc &DL, SmallVectorImpl<SDValue> &Vals);
+
   /// *Mutate* the specified machine node's memory references to the provided
   /// list.
   void setNodeMemRefs(MachineSDNode *N,
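The new declaration takes the value list by non-const reference because, as the implementation further down shows, the helper folds entries in place. A hedged usage sketch follows; the wrapper name joinChains and its empty/single-value handling are illustrative assumptions, not part of this commit.

#include "llvm/CodeGen/SelectionDAG.h"
using namespace llvm;

// Illustrative wrapper, not part of the commit: fold any number of chain
// values into a single chain.
static SDValue joinChains(SelectionDAG &DAG, const SDLoc &DL,
                          SmallVectorImpl<SDValue> &Chains) {
  if (Chains.empty())
    return DAG.getEntryNode();   // nothing to order; use the entry token
  if (Chains.size() == 1)
    return Chains[0];            // one chain needs no TokenFactor
  // getTokenFactor handles lists with more than 64k entries by introducing
  // intermediate TokenFactors, so the caller does not have to.
  return DAG.getTokenFactor(DL, Chains);
}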
lib/CodeGen/SelectionDAG/SelectionDAG.cpp
@@ -9286,6 +9286,19 @@ void SelectionDAG::createOperands(SDNode *Node, ArrayRef<SDValue> Vals) {
   checkForCycles(Node);
 }
 
+SDValue SelectionDAG::getTokenFactor(const SDLoc &DL,
+                                     SmallVectorImpl<SDValue> &Vals) {
+  size_t Limit = SDNode::getMaxNumOperands();
+  while (Vals.size() > Limit) {
+    unsigned SliceIdx = Vals.size() - Limit;
+    auto ExtractedTFs = ArrayRef<SDValue>(Vals).slice(SliceIdx, Limit);
+    SDValue NewTF = getNode(ISD::TokenFactor, DL, MVT::Other, ExtractedTFs);
+    Vals.erase(Vals.begin() + SliceIdx, Vals.end());
+    Vals.emplace_back(NewTF);
+  }
+  return getNode(ISD::TokenFactor, DL, MVT::Other, Vals);
+}
+
 #ifndef NDEBUG
 static void checkForCyclesHelper(const SDNode *N,
                                  SmallPtrSetImpl<const SDNode*> &Visited,
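Two details of the loop above are worth calling out (context, not from the commit message): the slice is taken from the tail of Vals, so every iteration folds exactly getMaxNumOperands() values into one intermediate TokenFactor and appends that node back, shrinking the list by Limit - 1 per pass; and the folding mutates Vals in place, which is presumably why the parameter is a SmallVectorImpl<SDValue> & rather than an ArrayRef<SDValue>. When the loop exits, the final getNode call sees at most Limit operands.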
lib/CodeGen/SelectionDAG/SelectionDAGBuilder.cpp
@@ -1032,19 +1032,7 @@ SDValue SelectionDAGBuilder::getRoot() {
   }
 
   // Otherwise, we have to make a token factor node.
-  // If we have >= 2^16 loads then split across multiple token factors as
-  // there's a 64k limit on the number of SDNode operands.
-  SDValue Root;
-  size_t Limit = SDNode::getMaxNumOperands();
-  while (PendingLoads.size() > Limit) {
-    unsigned SliceIdx = PendingLoads.size() - Limit;
-    auto ExtractedTFs = ArrayRef<SDValue>(PendingLoads).slice(SliceIdx, Limit);
-    SDValue NewTF =
-        DAG.getNode(ISD::TokenFactor, getCurSDLoc(), MVT::Other, ExtractedTFs);
-    PendingLoads.erase(PendingLoads.begin() + SliceIdx, PendingLoads.end());
-    PendingLoads.emplace_back(NewTF);
-  }
-  Root = DAG.getNode(ISD::TokenFactor, getCurSDLoc(), MVT::Other, PendingLoads);
+  SDValue Root = DAG.getTokenFactor(getCurSDLoc(), PendingLoads);
   PendingLoads.clear();
   DAG.setRoot(Root);
   return Root;