//===----- AggressiveAntiDepBreaker.cpp - Anti-dep breaker ----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the AggressiveAntiDepBreaker class, which
// implements register anti-dependence breaking during post-RA
// scheduling. It attempts to break all anti-dependencies within a
// block.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "post-RA-sched"
#include "AggressiveAntiDepBreaker.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;

// If DebugDiv > 0 then only break antidep with (ID % DebugDiv) == DebugMod
static cl::opt<int>
DebugDiv("agg-antidep-debugdiv",
         cl::desc("Debug control for aggressive anti-dep breaker"),
         cl::init(0), cl::Hidden);
static cl::opt<int>
DebugMod("agg-antidep-debugmod",
         cl::desc("Debug control for aggressive anti-dep breaker"),
         cl::init(0), cl::Hidden);
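
// A sketch of how these hidden knobs are meant to be used (the division
// scheme is taken from the renamecnt check in FindSuitableFreeRegisters
// below; the exact driver invocation may differ): with DebugDiv=4 and
// DebugMod=1, only renames whose running count satisfies
// (renamecnt % 4) == 1 are actually performed. Repeatedly adjusting
// DebugDiv and DebugMod bisects a misbehaving rename down to a single
// candidate.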

AggressiveAntiDepState::AggressiveAntiDepState(const unsigned TargetRegs,
                                               MachineBasicBlock *BB) :
  NumTargetRegs(TargetRegs), GroupNodes(TargetRegs, 0),
  GroupNodeIndices(TargetRegs, 0),
  KillIndices(TargetRegs, 0),
  DefIndices(TargetRegs, 0)
{
  const unsigned BBSize = BB->size();
  for (unsigned i = 0; i < NumTargetRegs; ++i) {
    // Initialize all registers to be in their own group. Initially we
    // assign the register to the same-indexed GroupNode.
    GroupNodeIndices[i] = i;
    // Initialize the indices to indicate that no registers are live.
    KillIndices[i] = ~0u;
    DefIndices[i] = BBSize;
  }
}
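
// The group state above is a small union-find structure: GroupNodeIndices
// maps a register to a node, and GroupNodes chains nodes to their parents.
// An illustrative (hypothetical) use of the three operations defined below,
// assuming a freshly constructed state and some basic block BB:
//
//   AggressiveAntiDepState S(/*TargetRegs=*/8, BB);
//   S.UnionGroups(3, 5);                    // 3 and 5 now share one group
//   assert(S.GetGroup(3) == S.GetGroup(5));
//   S.LeaveGroup(3);                        // 3 moves to a fresh group
//   assert(S.GetGroup(3) != S.GetGroup(5));
//   S.UnionGroups(5, 0);                    // group 0 means "do not rename"
//   assert(S.GetGroup(5) == 0);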

unsigned AggressiveAntiDepState::GetGroup(unsigned Reg) {
  unsigned Node = GroupNodeIndices[Reg];
  while (GroupNodes[Node] != Node)
    Node = GroupNodes[Node];

  return Node;
}

void AggressiveAntiDepState::GetGroupRegs(
  unsigned Group,
  std::vector<unsigned> &Regs,
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference> *RegRefs)
{
  for (unsigned Reg = 0; Reg != NumTargetRegs; ++Reg) {
    if ((GetGroup(Reg) == Group) && (RegRefs->count(Reg) > 0))
      Regs.push_back(Reg);
  }
}

unsigned AggressiveAntiDepState::UnionGroups(unsigned Reg1, unsigned Reg2)
{
  assert(GroupNodes[0] == 0 && "GroupNode 0 not parent!");
  assert(GroupNodeIndices[0] == 0 && "Reg 0 not in Group 0!");

  // find group for each register
  unsigned Group1 = GetGroup(Reg1);
  unsigned Group2 = GetGroup(Reg2);

  // if either group is 0, then that must become the parent
  unsigned Parent = (Group1 == 0) ? Group1 : Group2;
  unsigned Other = (Parent == Group1) ? Group2 : Group1;
  GroupNodes.at(Other) = Parent;
  return Parent;
}

unsigned AggressiveAntiDepState::LeaveGroup(unsigned Reg)
{
  // Create a new GroupNode for Reg. Reg's existing GroupNode must
  // stay as is because there could be other GroupNodes referring to
  // it.
  unsigned idx = GroupNodes.size();
  GroupNodes.push_back(idx);
  GroupNodeIndices[Reg] = idx;
  return idx;
}

bool AggressiveAntiDepState::IsLive(unsigned Reg)
{
  // KillIndex must be defined and DefIndex not defined for a register
  // to be live.
  return((KillIndices[Reg] != ~0u) && (DefIndices[Reg] == ~0u));
}
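
// Note on the encoding used by KillIndices/DefIndices (as initialized in the
// constructor above and updated by the breaker below): indices refer to
// instruction positions within the current scheduling region, with larger
// values meaning later instructions. A register is live exactly when its
// kill index is known (!= ~0u) and no def has been seen yet
// (DefIndices[Reg] == ~0u); a dead register instead records the index of its
// most recent def.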

AggressiveAntiDepBreaker::
AggressiveAntiDepBreaker(MachineFunction& MFi,
                         TargetSubtarget::RegClassVector& CriticalPathRCs) :
  AntiDepBreaker(), MF(MFi),
  MRI(MF.getRegInfo()),
  TII(MF.getTarget().getInstrInfo()),
  TRI(MF.getTarget().getRegisterInfo()),
  AllocatableSet(TRI->getAllocatableSet(MF)),
  State(NULL) {
  /* Collect a bitset of all registers that are only broken if they
     are on the critical path. */
  for (unsigned i = 0, e = CriticalPathRCs.size(); i < e; ++i) {
    BitVector CPSet = TRI->getAllocatableSet(MF, CriticalPathRCs[i]);
    if (CriticalPathSet.none())
      CriticalPathSet = CPSet;
    else
      CriticalPathSet |= CPSet;
  }

  DEBUG(dbgs() << "AntiDep Critical-Path Registers:");
  DEBUG(for (int r = CriticalPathSet.find_first(); r != -1;
             r = CriticalPathSet.find_next(r))
          dbgs() << " " << TRI->getName(r));
  DEBUG(dbgs() << '\n');
}
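
// CriticalPathRCs is supplied by the target when it asks for aggressive
// anti-dependence breaking (typically alongside its post-RA scheduling
// hook). The effect, given the CriticalPathSet built above: registers in the
// listed classes are only considered for renaming when the instruction lies
// on the critical path, while an empty list leaves CriticalPathSet empty and
// makes every allocatable register a rename candidate everywhere.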

AggressiveAntiDepBreaker::~AggressiveAntiDepBreaker() {
  delete State;
}

void AggressiveAntiDepBreaker::StartBlock(MachineBasicBlock *BB) {
  assert(State == NULL);
  State = new AggressiveAntiDepState(TRI->getNumRegs(), BB);

  bool IsReturnBlock = (!BB->empty() && BB->back().getDesc().isReturn());
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();

  // Determine the live-out physregs for this block.
  if (IsReturnBlock) {
    // In a return block, examine the function live-out regs.
    for (MachineRegisterInfo::liveout_iterator I = MRI.liveout_begin(),
         E = MRI.liveout_end(); I != E; ++I) {
      for (const unsigned *Alias = TRI->getOverlaps(*I);
           unsigned Reg = *Alias; ++Alias) {
        State->UnionGroups(Reg, 0);
        KillIndices[Reg] = BB->size();
        DefIndices[Reg] = ~0u;
      }
    }
  }

  // In a non-return block, examine the live-in regs of all successors.
  // Note a return block can have successors if the return instruction is
  // predicated.
  for (MachineBasicBlock::succ_iterator SI = BB->succ_begin(),
       SE = BB->succ_end(); SI != SE; ++SI)
    for (MachineBasicBlock::livein_iterator I = (*SI)->livein_begin(),
         E = (*SI)->livein_end(); I != E; ++I) {
      for (const unsigned *Alias = TRI->getOverlaps(*I);
           unsigned Reg = *Alias; ++Alias) {
        State->UnionGroups(Reg, 0);
        KillIndices[Reg] = BB->size();
        DefIndices[Reg] = ~0u;
      }
    }

  // Mark live-out callee-saved registers. In a return block this is
  // all callee-saved registers. In non-return this is any
  // callee-saved register that is not saved in the prolog.
  const MachineFrameInfo *MFI = MF.getFrameInfo();
  BitVector Pristine = MFI->getPristineRegs(BB);
  for (const unsigned *I = TRI->getCalleeSavedRegs(); *I; ++I) {
    unsigned Reg = *I;
    if (!IsReturnBlock && !Pristine.test(Reg)) continue;
    for (const unsigned *Alias = TRI->getOverlaps(Reg);
         unsigned AliasReg = *Alias; ++Alias) {
      State->UnionGroups(AliasReg, 0);
      KillIndices[AliasReg] = BB->size();
      DefIndices[AliasReg] = ~0u;
    }
  }
}
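
// After StartBlock, every register that is live out of the block (function
// live-outs, successor live-ins, and the relevant callee-saved registers)
// sits in group 0 with KillIndices[Reg] == BB->size() and
// DefIndices[Reg] == ~0u, i.e. it is live and may not be renamed; everything
// else still has the constructor's "not live" encoding.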

void AggressiveAntiDepBreaker::FinishBlock() {
  delete State;
  State = NULL;
}

void AggressiveAntiDepBreaker::Observe(MachineInstr *MI, unsigned Count,
                                       unsigned InsertPosIndex) {
  assert(Count < InsertPosIndex && "Instruction index out of expected range!");

  std::set<unsigned> PassthruRegs;
  GetPassthruRegs(MI, PassthruRegs);
  PrescanInstruction(MI, Count, PassthruRegs);
  ScanInstruction(MI, Count);

  DEBUG(dbgs() << "Observe: ");
  DEBUG(MI->dump());
  DEBUG(dbgs() << "\tRegs:");

  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  for (unsigned Reg = 0; Reg != TRI->getNumRegs(); ++Reg) {
    // If Reg is currently live, then mark that it can't be renamed as
    // we don't know the extent of its live-range anymore (now that it
    // has been scheduled). If it is not live but was defined in the
    // previous schedule region, then set its def index to the most
    // conservative location (i.e. the beginning of the previous
    // schedule region).
    if (State->IsLive(Reg)) {
      DEBUG(if (State->GetGroup(Reg) != 0)
              dbgs() << " " << TRI->getName(Reg) << "=g" <<
                State->GetGroup(Reg) << "->g0(region live-out)");
      State->UnionGroups(Reg, 0);
    } else if ((DefIndices[Reg] < InsertPosIndex)
               && (DefIndices[Reg] >= Count)) {
      DefIndices[Reg] = Count;
    }
  }
  DEBUG(dbgs() << '\n');
}
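
// In other words, Observe runs the same prescan/scan bookkeeping as the
// breaking path but never renames: any register still live at this point is
// merged into group 0 so it cannot be renamed later, and defs from the
// previous scheduling region are conservatively moved back to its beginning.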

bool AggressiveAntiDepBreaker::IsImplicitDefUse(MachineInstr *MI,
                                                MachineOperand& MO)
{
  if (!MO.isReg() || !MO.isImplicit())
    return false;

  unsigned Reg = MO.getReg();
  if (Reg == 0)
    return false;

  MachineOperand *Op = NULL;
  if (MO.isDef())
    Op = MI->findRegisterUseOperand(Reg, true);
  else
    Op = MI->findRegisterDefOperand(Reg);

  return((Op != NULL) && Op->isImplicit());
}

void AggressiveAntiDepBreaker::GetPassthruRegs(MachineInstr *MI,
                                           std::set<unsigned>& PassthruRegs) {
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    if ((MO.isDef() && MI->isRegTiedToUseOperand(i)) ||
        IsImplicitDefUse(MI, MO)) {
      const unsigned Reg = MO.getReg();
      PassthruRegs.insert(Reg);
      for (const unsigned *Subreg = TRI->getSubRegisters(Reg);
           *Subreg; ++Subreg) {
        PassthruRegs.insert(*Subreg);
      }
    }
  }
}
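
// A "pass-through" register, for the purposes of this pass, is one whose
// live range flows through MI rather than starting or ending there: either a
// def that is tied to a use operand (two-address style, e.g. an instruction
// of the illustrative form "R1 = ADD R1, R2", where the defined R1 must stay
// the same register as the used R1), or an implicit def paired with an
// implicit use of the same register. Such defs are renamed together with
// their uses rather than on their own.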

/// AntiDepEdges - Return in Edges the anti- and output- dependencies
/// in SU that we want to consider for breaking.
static void AntiDepEdges(const SUnit *SU, std::vector<const SDep*>& Edges) {
  SmallSet<unsigned, 4> RegSet;
  for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
       P != PE; ++P) {
    if ((P->getKind() == SDep::Anti) || (P->getKind() == SDep::Output)) {
      unsigned Reg = P->getReg();
      if (RegSet.count(Reg) == 0) {
        Edges.push_back(&*P);
        RegSet.insert(Reg);
      }
    }
  }
}

/// CriticalPathStep - Return the next SUnit after SU on the bottom-up
/// critical path.
static const SUnit *CriticalPathStep(const SUnit *SU) {
  const SDep *Next = 0;
  unsigned NextDepth = 0;
  // Find the predecessor edge with the greatest depth.
  if (SU != 0) {
    for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
         P != PE; ++P) {
      const SUnit *PredSU = P->getSUnit();
      unsigned PredLatency = P->getLatency();
      unsigned PredTotalLatency = PredSU->getDepth() + PredLatency;
      // In the case of a latency tie, prefer an anti-dependency edge over
      // other types of edges.
      if (NextDepth < PredTotalLatency ||
          (NextDepth == PredTotalLatency && P->getKind() == SDep::Anti)) {
        NextDepth = PredTotalLatency;
        Next = &*P;
      }
    }
  }

  return (Next) ? Next->getSUnit() : 0;
}
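
// For example (illustrative numbers only): if SU has two predecessors, one
// at depth 5 reached over a latency-2 data edge and one at depth 6 reached
// over a latency-1 anti edge, both total 7; the tie-break above picks the
// anti edge, keeping the walk on dependence chains that this pass may be
// able to break.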

void AggressiveAntiDepBreaker::HandleLastUse(unsigned Reg, unsigned KillIdx,
                                             const char *tag,
                                             const char *header,
                                             const char *footer) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  if (!State->IsLive(Reg)) {
    KillIndices[Reg] = KillIdx;
    DefIndices[Reg] = ~0u;
    RegRefs.erase(Reg);
    State->LeaveGroup(Reg);
    DEBUG(if (header != NULL) {
        dbgs() << header << TRI->getName(Reg); header = NULL; });
    DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << tag);
  }
  // Repeat for subregisters.
  for (const unsigned *Subreg = TRI->getSubRegisters(Reg);
       *Subreg; ++Subreg) {
    unsigned SubregReg = *Subreg;
    if (!State->IsLive(SubregReg)) {
      KillIndices[SubregReg] = KillIdx;
      DefIndices[SubregReg] = ~0u;
      RegRefs.erase(SubregReg);
      State->LeaveGroup(SubregReg);
      DEBUG(if (header != NULL) {
          dbgs() << header << TRI->getName(Reg); header = NULL; });
      DEBUG(dbgs() << " " << TRI->getName(SubregReg) << "->g" <<
            State->GetGroup(SubregReg) << tag);
    }
  }

  DEBUG(if ((header == NULL) && (footer != NULL)) dbgs() << footer);
}

void AggressiveAntiDepBreaker::PrescanInstruction(MachineInstr *MI,
                                                  unsigned Count,
                                             std::set<unsigned>& PassthruRegs) {
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // Handle dead defs by simulating a last-use of the register just
  // after the def. A dead def can occur because the def is truly
  // dead, or because only a subregister is live at the def. If we
  // don't do this the dead def will be incorrectly merged into the
  // previous def.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    HandleLastUse(Reg, Count + 1, "", "\tDead Def: ", "\n");
  }

  DEBUG(dbgs() << "\tDef Groups:");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" << State->GetGroup(Reg));

    // If MI's defs have a special allocation requirement, don't allow
    // any def registers to be changed. Also assume all registers
    // defined in a call must not be changed (ABI).
    if (MI->getDesc().isCall() || MI->getDesc().hasExtraDefRegAllocReq() ||
        TII->isPredicated(MI)) {
      DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Any aliases that are live at this point are completely or
    // partially defined here, so group those aliases with Reg.
    for (const unsigned *Alias = TRI->getAliasSet(Reg); *Alias; ++Alias) {
      unsigned AliasReg = *Alias;
      if (State->IsLive(AliasReg)) {
        State->UnionGroups(Reg, AliasReg);
        DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << "(via " <<
              TRI->getName(AliasReg) << ")");
      }
    }

    // Note register reference...
    const TargetRegisterClass *RC = NULL;
    if (i < MI->getDesc().getNumOperands())
      RC = MI->getDesc().OpInfo[i].getRegClass(TRI);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(dbgs() << '\n');

  // Scan the register defs for this instruction and update
  // live-ranges.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;
    // Ignore KILLs and passthru registers for liveness...
    if (MI->isKill() || (PassthruRegs.count(Reg) != 0))
      continue;

    // Update def for Reg and aliases.
    for (const unsigned *Alias = TRI->getOverlaps(Reg);
         unsigned AliasReg = *Alias; ++Alias)
      DefIndices[AliasReg] = Count;
  }
}

void AggressiveAntiDepBreaker::ScanInstruction(MachineInstr *MI,
                                               unsigned Count) {
  DEBUG(dbgs() << "\tUse Groups:");
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // If MI's uses have special allocation requirement, don't allow
  // any use registers to be changed. Also assume all registers
  // used in a call must not be changed (ABI).
  // FIXME: The issue with predicated instruction is more complex. We are being
  // conservative here because the kill markers cannot be trusted after
  // if-conversion:
  // %R6<def> = LDR %SP, %reg0, 92, pred:14, pred:%reg0; mem:LD4[FixedStack14]
  // ...
  // STR %R0, %R6<kill>, %reg0, 0, pred:0, pred:%CPSR; mem:ST4[%395]
  // %R6<def> = LDR %SP, %reg0, 100, pred:0, pred:%CPSR; mem:LD4[FixedStack12]
  // STR %R0, %R6<kill>, %reg0, 0, pred:14, pred:%reg0; mem:ST4[%396](align=8)
  //
  // The first R6 kill is not really a kill since it's killed by a predicated
  // instruction which may not be executed. The second R6 def may or may not
  // re-define R6 so it's not safe to change it since the last R6 use cannot be
  // changed.
  bool Special = MI->getDesc().isCall() ||
    MI->getDesc().hasExtraSrcRegAllocReq() ||
    TII->isPredicated(MI);

  // Scan the register uses for this instruction and update
  // live-ranges, groups and RegRefs.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isUse()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" <<
          State->GetGroup(Reg));

    // If it wasn't previously live but now it is, this is a kill. Forget
    // the previous live-range information and start a new live-range
    // for the register.
    HandleLastUse(Reg, Count, "(last-use)");

    if (Special) {
      DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Note register reference...
    const TargetRegisterClass *RC = NULL;
    if (i < MI->getDesc().getNumOperands())
      RC = MI->getDesc().OpInfo[i].getRegClass(TRI);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(dbgs() << '\n');

  // Form a group of all defs and uses of a KILL instruction to ensure
  // that all registers are renamed as a group.
  if (MI->isKill()) {
    DEBUG(dbgs() << "\tKill Group:");

    unsigned FirstReg = 0;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (Reg == 0) continue;

      if (FirstReg != 0) {
        DEBUG(dbgs() << "=" << TRI->getName(Reg));
        State->UnionGroups(FirstReg, Reg);
      } else {
        DEBUG(dbgs() << " " << TRI->getName(Reg));
        FirstReg = Reg;
      }
    }

    DEBUG(dbgs() << "->g" << State->GetGroup(FirstReg) << '\n');
  }
}
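
// GetRenameRegisters below computes, for a single register, the set of
// physical registers it could legally be renamed to: it starts from the
// allocatable registers of the first referencing operand's register class
// and intersects in the allocatable set of every further referencing class
// (operands with no class information are skipped). So a register used both
// as, say, a 32-bit and a 16-bit operand can only be renamed to something
// allocatable in both classes; the widths here are purely illustrative.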

BitVector AggressiveAntiDepBreaker::GetRenameRegisters(unsigned Reg) {
  BitVector BV(TRI->getNumRegs(), false);
  bool first = true;

  // Check all references that need rewriting for Reg. For each, use
  // the corresponding register class to narrow the set of registers
  // that are appropriate for renaming.
  std::pair<std::multimap<unsigned,
                     AggressiveAntiDepState::RegisterReference>::iterator,
            std::multimap<unsigned,
                     AggressiveAntiDepState::RegisterReference>::iterator>
    Range = State->GetRegRefs().equal_range(Reg);
  for (std::multimap<unsigned,
       AggressiveAntiDepState::RegisterReference>::iterator Q = Range.first,
       QE = Range.second; Q != QE; ++Q) {
    const TargetRegisterClass *RC = Q->second.RC;
    if (RC == NULL) continue;

    BitVector RCBV = TRI->getAllocatableSet(MF, RC);
    if (first) {
      BV |= RCBV;
      first = false;
    } else {
      BV &= RCBV;
    }

    DEBUG(dbgs() << " " << RC->getName());
  }

  return BV;
}

bool AggressiveAntiDepBreaker::FindSuitableFreeRegisters(
                                unsigned AntiDepGroupIndex,
                                RenameOrderType& RenameOrder,
                                std::map<unsigned, unsigned> &RenameMap) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // Collect all referenced registers in the same group as
  // AntiDepReg. These all need to be renamed together if we are to
  // break the anti-dependence.
  std::vector<unsigned> Regs;
  State->GetGroupRegs(AntiDepGroupIndex, Regs, &RegRefs);
  assert(Regs.size() > 0 && "Empty register group!");
  if (Regs.size() == 0)
    return false;

  // Find the "superest" register in the group. At the same time,
  // collect the BitVector of registers that can be used to rename
  // each register.
  DEBUG(dbgs() << "\tRename Candidates for Group g" << AntiDepGroupIndex
        << ":\n");
  std::map<unsigned, BitVector> RenameRegisterMap;
  unsigned SuperReg = 0;
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if ((SuperReg == 0) || TRI->isSuperRegister(SuperReg, Reg))
      SuperReg = Reg;

    // If Reg has any references, then collect possible rename regs
    if (RegRefs.count(Reg) > 0) {
      DEBUG(dbgs() << "\t\t" << TRI->getName(Reg) << ":");

      BitVector BV = GetRenameRegisters(Reg);
      RenameRegisterMap.insert(std::pair<unsigned, BitVector>(Reg, BV));

      DEBUG(dbgs() << " ::");
      DEBUG(for (int r = BV.find_first(); r != -1; r = BV.find_next(r))
              dbgs() << " " << TRI->getName(r));
      DEBUG(dbgs() << "\n");
    }
  }

  // All group registers should be a subreg of SuperReg.
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if (Reg == SuperReg) continue;
    bool IsSub = TRI->isSubRegister(SuperReg, Reg);
    assert(IsSub && "Expecting group subregister");
    if (!IsSub)
      return false;
  }

#ifndef NDEBUG
  // If DebugDiv > 0 then only rename (renamecnt % DebugDiv) == DebugMod
  if (DebugDiv > 0) {
    static int renamecnt = 0;
    if (renamecnt++ % DebugDiv != DebugMod)
      return false;

    dbgs() << "*** Performing rename " << TRI->getName(SuperReg) <<
      " for debug ***\n";
  }
#endif

  // Check each possible rename register for SuperReg in round-robin
  // order. If that register is available, and the corresponding
  // registers are available for the other group subregisters, then we
  // can use those registers to rename.

  // FIXME: Using getMinimalPhysRegClass is very conservative. We should
  // check every use of the register and find the largest register class
  // that can be used in all of them.
  const TargetRegisterClass *SuperRC =
    TRI->getMinimalPhysRegClass(SuperReg, MVT::Other);

  const TargetRegisterClass::iterator RB = SuperRC->allocation_order_begin(MF);
  const TargetRegisterClass::iterator RE = SuperRC->allocation_order_end(MF);
  if (RB == RE) {
    DEBUG(dbgs() << "\tEmpty Super Regclass!!\n");
    return false;
  }

  DEBUG(dbgs() << "\tFind Registers:");

  if (RenameOrder.count(SuperRC) == 0)
    RenameOrder.insert(RenameOrderType::value_type(SuperRC, RE));

  const TargetRegisterClass::iterator OrigR = RenameOrder[SuperRC];
  const TargetRegisterClass::iterator EndR = ((OrigR == RE) ? RB : OrigR);
  TargetRegisterClass::iterator R = OrigR;
  do {
    if (R == RB) R = RE;
    --R;
    const unsigned NewSuperReg = *R;
    // Don't consider non-allocatable registers
    if (!AllocatableSet.test(NewSuperReg)) continue;
    // Don't replace a register with itself.
    if (NewSuperReg == SuperReg) continue;

    DEBUG(dbgs() << " [" << TRI->getName(NewSuperReg) << ':');
    RenameMap.clear();

    // For each referenced group register (which must be a SuperReg or
    // a subregister of SuperReg), find the corresponding subregister
    // of NewSuperReg and make sure it is free to be renamed.
    for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
      unsigned Reg = Regs[i];
      unsigned NewReg = 0;
      if (Reg == SuperReg) {
        NewReg = NewSuperReg;
      } else {
        unsigned NewSubRegIdx = TRI->getSubRegIndex(SuperReg, Reg);
        if (NewSubRegIdx != 0)
          NewReg = TRI->getSubReg(NewSuperReg, NewSubRegIdx);
      }

      DEBUG(dbgs() << " " << TRI->getName(NewReg));

      // Check if Reg can be renamed to NewReg.
      BitVector BV = RenameRegisterMap[Reg];
      if (!BV.test(NewReg)) {
        DEBUG(dbgs() << "(no rename)");
        goto next_super_reg;
      }

      // If NewReg is dead and NewReg's most recent def is not before
      // Reg's kill, it's safe to replace Reg with NewReg. We
      // must also check all aliases of NewReg, because we can't define a
      // register when any sub or super is already live.
      if (State->IsLive(NewReg) || (KillIndices[Reg] > DefIndices[NewReg])) {
        DEBUG(dbgs() << "(live)");
        goto next_super_reg;
      } else {
        bool found = false;
        for (const unsigned *Alias = TRI->getAliasSet(NewReg);
             *Alias; ++Alias) {
          unsigned AliasReg = *Alias;
          if (State->IsLive(AliasReg) ||
              (KillIndices[Reg] > DefIndices[AliasReg])) {
            DEBUG(dbgs() << "(alias " << TRI->getName(AliasReg) << " live)");
            found = true;
            break;
          }
        }
        if (found)
          goto next_super_reg;
      }

      // Record that 'Reg' can be renamed to 'NewReg'.
      RenameMap.insert(std::pair<unsigned, unsigned>(Reg, NewReg));
    }

    // If we fall-out here, then every register in the group can be
    // renamed, as recorded in RenameMap.
    RenameOrder.erase(SuperRC);
    RenameOrder.insert(RenameOrderType::value_type(SuperRC, R));
    DEBUG(dbgs() << "]\n");
    return true;

  next_super_reg:
    DEBUG(dbgs() << ']');
  } while (R != EndR);

  DEBUG(dbgs() << '\n');

  // No registers are free and available!
  return false;
}
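
// Sketch of a whole-group rename, with x86-style names used purely for
// illustration: if the group being broken is {EAX, AX, AL} and the
// round-robin order proposes ECX as NewSuperReg, then AX must map onto CX
// and AL onto CL via getSubRegIndex/getSubReg, every mapped register must
// pass the RenameRegisterMap class check, and ECX plus all of its aliases
// must be dead at the relevant indices; only then does RenameMap come back
// with {EAX->ECX, AX->CX, AL->CL}.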

/// BreakAntiDependencies - Identify anti-dependencies within the
/// ScheduleDAG and break them by renaming registers.
///
unsigned AggressiveAntiDepBreaker::BreakAntiDependencies(
                              const std::vector<SUnit>& SUnits,
                              MachineBasicBlock::iterator Begin,
                              MachineBasicBlock::iterator End,
                              unsigned InsertPosIndex) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // The code below assumes that there is at least one instruction,
  // so just duck out immediately if the block is empty.
  if (SUnits.empty()) return 0;

  // For each regclass the next register to use for renaming.
  RenameOrderType RenameOrder;

  // ...need a map from MI to SUnit.
  std::map<MachineInstr *, const SUnit *> MISUnitMap;
  for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
    const SUnit *SU = &SUnits[i];
    MISUnitMap.insert(std::pair<MachineInstr *, const SUnit *>(SU->getInstr(),
                                                               SU));
  }

  // Track progress along the critical path through the SUnit graph as
  // we walk the instructions. This is needed for regclasses that only
  // break critical-path anti-dependencies.
  const SUnit *CriticalPathSU = 0;
  MachineInstr *CriticalPathMI = 0;
  if (CriticalPathSet.any()) {
    for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
      const SUnit *SU = &SUnits[i];
      if (!CriticalPathSU ||
          ((SU->getDepth() + SU->Latency) >
           (CriticalPathSU->getDepth() + CriticalPathSU->Latency))) {
        CriticalPathSU = SU;
      }
    }

    CriticalPathMI = CriticalPathSU->getInstr();
  }

#ifndef NDEBUG
  DEBUG(dbgs() << "\n===== Aggressive anti-dependency breaking\n");
  DEBUG(dbgs() << "Available regs:");
  for (unsigned Reg = 0; Reg < TRI->getNumRegs(); ++Reg) {
    if (!State->IsLive(Reg))
      DEBUG(dbgs() << " " << TRI->getName(Reg));
  }
  DEBUG(dbgs() << '\n');
#endif
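
  // An illustrative example of what the walk below accomplishes (register
  // names are made up): given
  //     ... = ADD R1, R2     <- last use of R1
  //     R1  = LDR ...        <- anti-dependence (WAR) on R1
  // the later live range of R1, together with any grouped sub/super
  // registers, is renamed to a free register such as R3, which removes the
  // write-after-read hazard and lets the post-RA scheduler move the load
  // earlier.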

  // Attempt to break anti-dependence edges. Walk the instructions
  // from the bottom up, tracking information about liveness as we go
  // to help determine which registers are available.
  unsigned Broken = 0;
  unsigned Count = InsertPosIndex - 1;
  for (MachineBasicBlock::iterator I = End, E = Begin;
       I != E; --Count) {
    MachineInstr *MI = --I;

    DEBUG(dbgs() << "Anti: ");
    DEBUG(MI->dump());

    std::set<unsigned> PassthruRegs;
    GetPassthruRegs(MI, PassthruRegs);

    // Process the defs in MI...
    PrescanInstruction(MI, Count, PassthruRegs);

    // The dependence edges that represent anti- and output-
    // dependencies that are candidates for breaking.
    std::vector<const SDep *> Edges;
    const SUnit *PathSU = MISUnitMap[MI];
    AntiDepEdges(PathSU, Edges);

    // If MI is not on the critical path, then we don't rename
    // registers in the CriticalPathSet.
    BitVector *ExcludeRegs = NULL;
    if (MI == CriticalPathMI) {
      CriticalPathSU = CriticalPathStep(CriticalPathSU);
      CriticalPathMI = (CriticalPathSU) ? CriticalPathSU->getInstr() : 0;
    } else {
      ExcludeRegs = &CriticalPathSet;
    }

    // Ignore KILL instructions (they form a group in ScanInstruction
    // but don't cause any anti-dependence breaking themselves)
    if (!MI->isKill()) {
      // Attempt to break each anti-dependency...
      for (unsigned i = 0, e = Edges.size(); i != e; ++i) {
        const SDep *Edge = Edges[i];
        SUnit *NextSU = Edge->getSUnit();

        if ((Edge->getKind() != SDep::Anti) &&
            (Edge->getKind() != SDep::Output)) continue;

        unsigned AntiDepReg = Edge->getReg();
        DEBUG(dbgs() << "\tAntidep reg: " << TRI->getName(AntiDepReg));
        assert(AntiDepReg != 0 && "Anti-dependence on reg0?");

        if (!AllocatableSet.test(AntiDepReg)) {
          // Don't break anti-dependencies on non-allocatable registers.
          DEBUG(dbgs() << " (non-allocatable)\n");
          continue;
        } else if ((ExcludeRegs != NULL) && ExcludeRegs->test(AntiDepReg)) {
          // Don't break anti-dependencies for critical path registers
          // if not on the critical path
          DEBUG(dbgs() << " (not critical-path)\n");
          continue;
        } else if (PassthruRegs.count(AntiDepReg) != 0) {
          // If the anti-dep register liveness "passes-thru", then
          // don't try to change it. It will be changed along with
          // the use if required to break an earlier antidep.
          DEBUG(dbgs() << " (passthru)\n");
          continue;
        } else {
          // No anti-dep breaking for implicit deps
          MachineOperand *AntiDepOp = MI->findRegisterDefOperand(AntiDepReg);
          assert(AntiDepOp != NULL &&
                 "Can't find index for defined register operand");
          if ((AntiDepOp == NULL) || AntiDepOp->isImplicit()) {
            DEBUG(dbgs() << " (implicit)\n");
            continue;
          }

          // If the SUnit has other dependencies on the SUnit that
          // it anti-depends on, don't bother breaking the
          // anti-dependency since those edges would prevent such
          // units from being scheduled past each other
          // regardless.
          //
          // Also, if there are dependencies on other SUnits with the
          // same register as the anti-dependency, don't attempt to
          // break it.
          for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
               PE = PathSU->Preds.end(); P != PE; ++P) {
            if (P->getSUnit() == NextSU ?
                (P->getKind() != SDep::Anti || P->getReg() != AntiDepReg) :
                (P->getKind() == SDep::Data && P->getReg() == AntiDepReg)) {
              AntiDepReg = 0;
              break;
            }
          }
          for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
               PE = PathSU->Preds.end(); P != PE; ++P) {
            if ((P->getSUnit() == NextSU) && (P->getKind() != SDep::Anti) &&
                (P->getKind() != SDep::Output)) {
              DEBUG(dbgs() << " (real dependency)\n");
              AntiDepReg = 0;
              break;
            } else if ((P->getSUnit() != NextSU) &&
                       (P->getKind() == SDep::Data) &&
                       (P->getReg() == AntiDepReg)) {
              DEBUG(dbgs() << " (other dependency)\n");
              AntiDepReg = 0;
              break;
            }
          }

          if (AntiDepReg == 0) continue;
        }

        assert(AntiDepReg != 0);
        if (AntiDepReg == 0) continue;

        // Determine AntiDepReg's register group.
        const unsigned GroupIndex = State->GetGroup(AntiDepReg);
        if (GroupIndex == 0) {
          DEBUG(dbgs() << " (zero group)\n");
          continue;
        }

        DEBUG(dbgs() << '\n');

        // Look for a suitable register to use to break the anti-dependence.
        std::map<unsigned, unsigned> RenameMap;
        if (FindSuitableFreeRegisters(GroupIndex, RenameOrder, RenameMap)) {
          DEBUG(dbgs() << "\tBreaking anti-dependence edge on "
                << TRI->getName(AntiDepReg) << ":");

          // Handle each group register...
          for (std::map<unsigned, unsigned>::iterator
               S = RenameMap.begin(), E = RenameMap.end(); S != E; ++S) {
            unsigned CurrReg = S->first;
            unsigned NewReg = S->second;

            DEBUG(dbgs() << " " << TRI->getName(CurrReg) << "->" <<
                  TRI->getName(NewReg) << "(" <<
                  RegRefs.count(CurrReg) << " refs)");

            // Update the references to the old register CurrReg to
            // refer to the new register NewReg.
            std::pair<std::multimap<unsigned,
                      AggressiveAntiDepState::RegisterReference>::iterator,
                      std::multimap<unsigned,
                      AggressiveAntiDepState::RegisterReference>::iterator>
              Range = RegRefs.equal_range(CurrReg);
            for (std::multimap<unsigned,
                 AggressiveAntiDepState::RegisterReference>::iterator
                 Q = Range.first, QE = Range.second; Q != QE; ++Q) {
              Q->second.Operand->setReg(NewReg);
              // If the SU for the instruction being updated has debug
              // information related to the anti-dependency register, make
              // sure to update that as well.
              const SUnit *SU = MISUnitMap[Q->second.Operand->getParent()];
              if (!SU) continue;
              for (unsigned i = 0, e = SU->DbgInstrList.size() ; i < e ; ++i) {
                MachineInstr *DI = SU->DbgInstrList[i];
                assert (DI->getNumOperands()==3 && DI->getOperand(0).isReg() &&
                        DI->getOperand(0).getReg()
                        && "Non register dbg_value attached to SUnit!");
                if (DI->getOperand(0).getReg() == AntiDepReg)
                  DI->getOperand(0).setReg(NewReg);
              }
            }

            // We just went back in time and modified history; the
            // liveness information for CurrReg is now inconsistent. Set
            // the state as if it were dead.
            State->UnionGroups(NewReg, 0);
            RegRefs.erase(NewReg);
            DefIndices[NewReg] = DefIndices[CurrReg];
            KillIndices[NewReg] = KillIndices[CurrReg];

            State->UnionGroups(CurrReg, 0);
            RegRefs.erase(CurrReg);
            DefIndices[CurrReg] = KillIndices[CurrReg];
            KillIndices[CurrReg] = ~0u;
            assert(((KillIndices[CurrReg] == ~0u) !=
                    (DefIndices[CurrReg] == ~0u)) &&
                   "Kill and Def maps aren't consistent for AntiDepReg!");
          }

          ++Broken;
          DEBUG(dbgs() << '\n');
        }
      }
    }

    ScanInstruction(MI, Count);
  }

  return Broken;
}