2007-06-08 19:18:56 +02:00
|
|
|
//===-- SimpleRegisterCoalescing.cpp - Register Coalescing ----------------===//
|
|
|
|
//
|
|
|
|
// The LLVM Compiler Infrastructure
|
|
|
|
//
|
2007-12-29 21:36:04 +01:00
|
|
|
// This file is distributed under the University of Illinois Open Source
|
|
|
|
// License. See LICENSE.TXT for details.
|
2007-06-08 19:18:56 +02:00
|
|
|
//
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
//
|
|
|
|
// This file implements a simple register coalescing pass that attempts to
|
|
|
|
// aggressively coalesce every register copy that it can.
|
|
|
|
//
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
|
2007-08-01 00:37:44 +02:00
|
|
|
#define DEBUG_TYPE "regcoalescing"
|
2007-11-05 18:41:38 +01:00
|
|
|
#include "SimpleRegisterCoalescing.h"
|
2007-06-08 19:18:56 +02:00
|
|
|
#include "VirtRegMap.h"
|
2007-11-05 18:41:38 +01:00
|
|
|
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
|
2007-06-08 19:18:56 +02:00
|
|
|
#include "llvm/Value.h"
|
2009-10-10 01:27:56 +02:00
|
|
|
#include "llvm/Analysis/AliasAnalysis.h"
|
2007-06-08 19:18:56 +02:00
|
|
|
#include "llvm/CodeGen/MachineFrameInfo.h"
|
|
|
|
#include "llvm/CodeGen/MachineInstr.h"
|
2007-12-11 03:09:15 +01:00
|
|
|
#include "llvm/CodeGen/MachineLoopInfo.h"
|
2007-12-31 05:13:23 +01:00
|
|
|
#include "llvm/CodeGen/MachineRegisterInfo.h"
|
2007-06-08 19:18:56 +02:00
|
|
|
#include "llvm/CodeGen/Passes.h"
|
2007-09-06 18:18:45 +02:00
|
|
|
#include "llvm/CodeGen/RegisterCoalescer.h"
|
2007-06-08 19:18:56 +02:00
|
|
|
#include "llvm/Target/TargetInstrInfo.h"
|
|
|
|
#include "llvm/Target/TargetMachine.h"
|
2008-10-07 22:22:28 +02:00
|
|
|
#include "llvm/Target/TargetOptions.h"
|
2007-06-08 19:18:56 +02:00
|
|
|
#include "llvm/Support/CommandLine.h"
|
|
|
|
#include "llvm/Support/Debug.h"
|
2009-07-11 22:10:48 +02:00
|
|
|
#include "llvm/Support/ErrorHandling.h"
|
2009-07-25 02:23:56 +02:00
|
|
|
#include "llvm/Support/raw_ostream.h"
|
2010-03-21 02:47:49 +01:00
|
|
|
#include "llvm/ADT/OwningPtr.h"
|
2007-06-08 19:18:56 +02:00
|
|
|
#include "llvm/ADT/SmallSet.h"
|
|
|
|
#include "llvm/ADT/Statistic.h"
|
|
|
|
#include "llvm/ADT/STLExtras.h"
|
|
|
|
#include <algorithm>
|
|
|
|
#include <cmath>
|
|
|
|
using namespace llvm;
|
|
|
|
|
|
|
|
// Pass statistics, reported with -stats.
STATISTIC(numJoins    , "Number of interval joins performed");
STATISTIC(numCrossRCs , "Number of cross class joins performed");
STATISTIC(numCommutes , "Number of instruction commuting performed");
STATISTIC(numExtends  , "Number of copies extended");
STATISTIC(NumReMats   , "Number of instructions re-materialized");
STATISTIC(numPeep     , "Number of identity moves eliminated after coalescing");
STATISTIC(numAborts   , "Number of times interval joining aborted");
STATISTIC(numDeadValNo, "Number of valno def marked dead");

char SimpleRegisterCoalescing::ID = 0;

// Command-line knobs for the coalescer.

// Master switch: when false, no copies are coalesced at all.
static cl::opt<bool>
EnableJoining("join-liveintervals",
              cl::desc("Coalesce copies (default=true)"),
              cl::init(true));

// Disables joining copies whose source and destination live in different
// register classes.
static cl::opt<bool>
DisableCrossClassJoin("disable-cross-class-join",
               cl::desc("Avoid coalescing cross register class copies"),
               cl::init(false), cl::Hidden);

// Disables joining copies that involve a physical register.
static cl::opt<bool>
DisablePhysicalJoin("disable-physical-join",
               cl::desc("Avoid coalescing physical register copies"),
               cl::init(false), cl::Hidden);

// Register this pass as the default implementation of the RegisterCoalescer
// analysis group, declaring the analyses it transitively depends on.
INITIALIZE_AG_PASS_BEGIN(SimpleRegisterCoalescing, RegisterCoalescer,
                "simple-register-coalescing", "Simple Register Coalescing",
                false, false, true)
INITIALIZE_PASS_DEPENDENCY(LiveIntervals)
INITIALIZE_PASS_DEPENDENCY(SlotIndexes)
INITIALIZE_PASS_DEPENDENCY(MachineLoopInfo)
INITIALIZE_PASS_DEPENDENCY(StrongPHIElimination)
INITIALIZE_PASS_DEPENDENCY(PHIElimination)
INITIALIZE_PASS_DEPENDENCY(TwoAddressInstructionPass)
INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
INITIALIZE_AG_PASS_END(SimpleRegisterCoalescing, RegisterCoalescer,
                "simple-register-coalescing", "Simple Register Coalescing",
                false, false, true)

// Exposed so other code can schedule this pass by ID.
char &llvm::SimpleRegisterCoalescingID = SimpleRegisterCoalescing::ID;
|
2007-06-08 19:18:56 +02:00
|
|
|
|
|
|
|
/// getAnalysisUsage - Declare the analyses this pass requires and the ones it
/// preserves, so the pass manager can schedule it without invalidating more
/// than necessary.
void SimpleRegisterCoalescing::getAnalysisUsage(AnalysisUsage &AU) const {
  // Coalescing rewrites copies in place; it never changes the CFG.
  AU.setPreservesCFG();
  AU.addRequired<AliasAnalysis>();
  AU.addRequired<LiveIntervals>();
  AU.addPreserved<LiveIntervals>();
  AU.addPreserved<SlotIndexes>();
  AU.addRequired<MachineLoopInfo>();
  AU.addPreserved<MachineLoopInfo>();
  AU.addPreservedID(MachineDominatorsID);
  // Preserve whichever PHI-elimination flavor is active (selected by the
  // StrongPHIElim target option).
  if (StrongPHIElim)
    AU.addPreservedID(StrongPHIEliminationID);
  else
    AU.addPreservedID(PHIEliminationID);
  AU.addPreservedID(TwoAddressInstructionPassID);
  MachineFunctionPass::getAnalysisUsage(AU);
}
|
|
|
|
|
2007-07-09 14:00:59 +02:00
|
|
|
/// AdjustCopiesBackFrom - We found a non-trivially-coalescable copy with IntA
/// being the source and IntB being the dest, thus this defines a value number
/// in IntB.  If the source value number (in IntA) is defined by a copy from B,
/// see if we can merge these two pieces of B into a single value number,
/// eliminating a copy.  For example:
///
///  A3 = B0
///    ...
///  B1 = A3      <- this copy
///
/// In this case, B0 can be extended to where the B1 copy lives, allowing the B1
/// value number to be replaced with B0 (which simplifies the B liveinterval).
///
/// This returns true if an interval was modified.
///
bool SimpleRegisterCoalescing::AdjustCopiesBackFrom(const CoalescerPair &CP,
                                                    MachineInstr *CopyMI) {
  // Bail if there is no dst interval - can happen when merging physical subreg
  // operations.
  if (!li_->hasInterval(CP.getDstReg()))
    return false;

  // CP may be flipped; normalize so IntA is the copy source and IntB the dest.
  LiveInterval &IntA =
    li_->getInterval(CP.isFlipped() ? CP.getDstReg() : CP.getSrcReg());
  LiveInterval &IntB =
    li_->getInterval(CP.isFlipped() ? CP.getSrcReg() : CP.getDstReg());
  SlotIndex CopyIdx = li_->getInstructionIndex(CopyMI).getDefIndex();

  // BValNo is a value number in B that is defined by a copy from A.  'B3' in
  // the example above.
  LiveInterval::iterator BLR = IntB.FindLiveRangeContaining(CopyIdx);
  if (BLR == IntB.end()) return false;
  VNInfo *BValNo = BLR->valno;

  // Get the location that B is defined at.  Two options: either this value has
  // an unknown definition point or it is defined at CopyIdx.  If unknown, we
  // can't process it.
  if (!BValNo->isDefByCopy()) return false;
  assert(BValNo->def == CopyIdx && "Copy doesn't define the value?");

  // AValNo is the value number in A that defines the copy, A3 in the example.
  SlotIndex CopyUseIdx = CopyIdx.getUseIndex();
  LiveInterval::iterator ALR = IntA.FindLiveRangeContaining(CopyUseIdx);
  // The live range might not exist after fun with physreg coalescing.
  if (ALR == IntA.end()) return false;
  VNInfo *AValNo = ALR->valno;
  // If it's re-defined by an early clobber somewhere in the live range, then
  // it's not safe to eliminate the copy. FIXME: This is a temporary workaround.
  // See PR3149:
  // 172     %ECX<def> = MOV32rr %reg1039<kill>
  // 180     INLINEASM <es:subl $5,$1
  //         sbbl $3,$0>, 10, %EAX<def>, 14, %ECX<earlyclobber,def>, 9,
  //         %EAX<kill>,
  // 36, <fi#0>, 1, %reg0, 0, 9, %ECX<kill>, 36, <fi#1>, 1, %reg0, 0
  // 188     %EAX<def> = MOV32rr %EAX<kill>
  // 196     %ECX<def> = MOV32rr %ECX<kill>
  // 204     %ECX<def> = MOV32rr %ECX<kill>
  // 212     %EAX<def> = MOV32rr %EAX<kill>
  // 220     %EAX<def> = MOV32rr %EAX
  // 228     %reg1039<def> = MOV32rr %ECX<kill>
  // The early clobber operand ties ECX input to the ECX def.
  //
  // The live interval of ECX is represented as this:
  // %reg20,inf = [46,47:1)[174,230:0)  0@174-(230)  1@46-(47)
  // The coalescer has no idea there was a def in the middle of [174,230].
  if (AValNo->hasRedefByEC())
    return false;

  // If AValNo is defined as a copy from IntB, we can potentially process this.
  // Get the instruction that defines this value number.
  if (!CP.isCoalescable(AValNo->getCopy()))
    return false;

  // Get the LiveRange in IntB that this value number starts with.
  LiveInterval::iterator ValLR =
    IntB.FindLiveRangeContaining(AValNo->def.getPrevSlot());
  if (ValLR == IntB.end())
    return false;

  // Make sure that the end of the live range is inside the same block as
  // CopyMI.
  MachineInstr *ValLREndInst =
    li_->getInstructionFromIndex(ValLR->end.getPrevSlot());
  if (!ValLREndInst || ValLREndInst->getParent() != CopyMI->getParent())
    return false;

  // Okay, we now know that ValLR ends in the same block that the CopyMI
  // live-range starts.  If there are no intervening live ranges between them in
  // IntB, we can merge them.
  if (ValLR+1 != BLR) return false;

  // If a live interval is a physical register, conservatively check if any
  // of its sub-registers is overlapping the live interval of the virtual
  // register. If so, do not coalesce.
  if (TargetRegisterInfo::isPhysicalRegister(IntB.reg) &&
      *tri_->getSubRegisters(IntB.reg)) {
    for (const unsigned* SR = tri_->getSubRegisters(IntB.reg); *SR; ++SR)
      if (li_->hasInterval(*SR) && IntA.overlaps(li_->getInterval(*SR))) {
        DEBUG({
            dbgs() << "\t\tInterfere with sub-register ";
            li_->getInterval(*SR).print(dbgs(), tri_);
          });
        return false;
      }
  }

  DEBUG({
      dbgs() << "Extending: ";
      IntB.print(dbgs(), tri_);
    });

  // The gap in IntB that the extended B0 value will fill: from the end of the
  // range containing B0's value up to the start of the range the copy defined.
  SlotIndex FillerStart = ValLR->end, FillerEnd = BLR->start;
  // We are about to delete CopyMI, so need to remove it as the 'instruction
  // that defines this value #'. Update the valnum with the new defining
  // instruction #.
  BValNo->def  = FillerStart;
  BValNo->setCopy(0);

  // Okay, we can merge them.  We need to insert a new liverange:
  // [ValLR.end, BLR.begin) of either value number, then we merge the
  // two value numbers.
  IntB.addRange(LiveRange(FillerStart, FillerEnd, BValNo));

  // If the IntB live range is assigned to a physical register, and if that
  // physreg has sub-registers, update their live intervals as well.
  if (TargetRegisterInfo::isPhysicalRegister(IntB.reg)) {
    for (const unsigned *SR = tri_->getSubRegisters(IntB.reg); *SR; ++SR) {
      if (!li_->hasInterval(*SR))
        continue;
      LiveInterval &SRLI = li_->getInterval(*SR);
      SRLI.addRange(LiveRange(FillerStart, FillerEnd,
                              SRLI.getNextValue(FillerStart, 0,
                                                li_->getVNInfoAllocator())));
    }
  }

  // Okay, merge "B1" into the same value number as "B0".
  if (BValNo != ValLR->valno) {
    IntB.MergeValueNumberInto(BValNo, ValLR->valno);
  }
  DEBUG({
      dbgs() << "   result = ";
      IntB.print(dbgs(), tri_);
      dbgs() << "\n";
    });

  // If the source instruction was killing the source register before the
  // merge, unset the isKill marker given the live range has been extended.
  int UIdx = ValLREndInst->findRegisterUseOperandIdx(IntB.reg, true);
  if (UIdx != -1) {
    ValLREndInst->getOperand(UIdx).setIsKill(false);
  }

  // If the copy instruction was killing the destination register before the
  // merge, find the last use and trim the live range. That will also add the
  // isKill marker.
  if (ALR->end == CopyIdx)
    TrimLiveIntervalToLastUse(CopyUseIdx, CopyMI->getParent(), IntA, ALR);

  ++numExtends;
  return true;
}
|
|
|
|
|
2008-02-16 03:32:17 +01:00
|
|
|
/// HasOtherReachingDefs - Return true if there are definitions of IntB
|
|
|
|
/// other than BValNo val# that can reach uses of AValno val# of IntA.
|
|
|
|
bool SimpleRegisterCoalescing::HasOtherReachingDefs(LiveInterval &IntA,
|
|
|
|
LiveInterval &IntB,
|
|
|
|
VNInfo *AValNo,
|
|
|
|
VNInfo *BValNo) {
|
|
|
|
for (LiveInterval::iterator AI = IntA.begin(), AE = IntA.end();
|
|
|
|
AI != AE; ++AI) {
|
|
|
|
if (AI->valno != AValNo) continue;
|
|
|
|
LiveInterval::Ranges::iterator BI =
|
|
|
|
std::upper_bound(IntB.ranges.begin(), IntB.ranges.end(), AI->start);
|
|
|
|
if (BI != IntB.ranges.begin())
|
|
|
|
--BI;
|
|
|
|
for (; BI != IntB.ranges.end() && AI->end >= BI->start; ++BI) {
|
|
|
|
if (BI->valno == BValNo)
|
|
|
|
continue;
|
|
|
|
if (BI->start <= AI->start && BI->end > AI->start)
|
|
|
|
return true;
|
|
|
|
if (BI->start > AI->start && BI->start < AI->end)
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2009-12-03 01:50:42 +01:00
|
|
|
/// RemoveCopyByCommutingDef - We found a non-trivially-coalescable copy with
/// IntA being the source and IntB being the dest, thus this defines a value
/// number in IntB.  If the source value number (in IntA) is defined by a
/// commutable instruction and its other operand is coalesced to the copy dest
/// register, see if we can transform the copy into a noop by commuting the
/// definition. For example,
///
///  A3 = op A2 B0<kill>
///    ...
///  B1 = A3      <- this copy
///    ...
///     = op A3   <- more uses
///
/// ==>
///
///  B2 = op B0 A2<kill>
///    ...
///  B1 = B2      <- now an identify copy
///    ...
///     = op B2   <- more uses
///
/// This returns true if an interval was modified.
///
bool SimpleRegisterCoalescing::RemoveCopyByCommutingDef(const CoalescerPair &CP,
                                                        MachineInstr *CopyMI) {
  // FIXME: For now, only eliminate the copy by commuting its def when the
  // source register is a virtual register. We want to guard against cases
  // where the copy is a back edge copy and commuting the def lengthen the
  // live interval of the source register to the entire loop.
  if (CP.isPhys() && CP.isFlipped())
    return false;

  // Bail if there is no dst interval.
  if (!li_->hasInterval(CP.getDstReg()))
    return false;

  SlotIndex CopyIdx = li_->getInstructionIndex(CopyMI).getDefIndex();

  // CP may be flipped; normalize so IntA is the copy source and IntB the dest.
  LiveInterval &IntA =
    li_->getInterval(CP.isFlipped() ? CP.getDstReg() : CP.getSrcReg());
  LiveInterval &IntB =
    li_->getInterval(CP.isFlipped() ? CP.getSrcReg() : CP.getDstReg());

  // BValNo is a value number in B that is defined by a copy from A. 'B3' in
  // the example above.
  VNInfo *BValNo = IntB.getVNInfoAt(CopyIdx);
  if (!BValNo || !BValNo->isDefByCopy())
    return false;

  assert(BValNo->def == CopyIdx && "Copy doesn't define the value?");

  // AValNo is the value number in A that defines the copy, A3 in the example.
  VNInfo *AValNo = IntA.getVNInfoAt(CopyIdx.getUseIndex());
  assert(AValNo && "COPY source not live");

  // If other defs can reach uses of this def, then it's not safe to perform
  // the optimization.
  if (AValNo->isPHIDef() || AValNo->isUnused() || AValNo->hasPHIKill())
    return false;
  MachineInstr *DefMI = li_->getInstructionFromIndex(AValNo->def);
  if (!DefMI)
    return false;
  const TargetInstrDesc &TID = DefMI->getDesc();
  if (!TID.isCommutable())
    return false;
  // If DefMI is a two-address instruction then commuting it will change the
  // destination register.
  int DefIdx = DefMI->findRegisterDefOperandIdx(IntA.reg);
  assert(DefIdx != -1);
  unsigned UseOpIdx;
  if (!DefMI->isRegTiedToUseOperand(DefIdx, &UseOpIdx))
    return false;
  // Find which operand would become the def after commuting: it must be the
  // one paired against the tied use operand.
  unsigned Op1, Op2, NewDstIdx;
  if (!tii_->findCommutedOpIndices(DefMI, Op1, Op2))
    return false;
  if (Op1 == UseOpIdx)
    NewDstIdx = Op2;
  else if (Op2 == UseOpIdx)
    NewDstIdx = Op1;
  else
    return false;

  // The commuted-in operand must be IntB's register and must be killed here,
  // otherwise commuting would extend B's live range.
  MachineOperand &NewDstMO = DefMI->getOperand(NewDstIdx);
  unsigned NewReg = NewDstMO.getReg();
  if (NewReg != IntB.reg || !NewDstMO.isKill())
    return false;

  // Make sure there are no other definitions of IntB that would reach the
  // uses which the new definition can reach.
  if (HasOtherReachingDefs(IntA, IntB, AValNo, BValNo))
    return false;

  // Abort if the aliases of IntB.reg have values that are not simply the
  // clobbers from the superreg.
  if (TargetRegisterInfo::isPhysicalRegister(IntB.reg))
    for (const unsigned *AS = tri_->getAliasSet(IntB.reg); *AS; ++AS)
      if (li_->hasInterval(*AS) &&
          HasOtherReachingDefs(IntA, li_->getInterval(*AS), AValNo, 0))
        return false;

  // If some of the uses of IntA.reg is already coalesced away, return false.
  // It's not possible to determine whether it's safe to perform the coalescing.
  for (MachineRegisterInfo::use_nodbg_iterator UI =
         mri_->use_nodbg_begin(IntA.reg),
       UE = mri_->use_nodbg_end(); UI != UE; ++UI) {
    MachineInstr *UseMI = &*UI;
    SlotIndex UseIdx = li_->getInstructionIndex(UseMI);
    LiveInterval::iterator ULR = IntA.FindLiveRangeContaining(UseIdx);
    if (ULR == IntA.end())
      continue;
    if (ULR->valno == AValNo && JoinedCopies.count(UseMI))
      return false;
  }

  DEBUG(dbgs() << "\tRemoveCopyByCommutingDef: " << AValNo->def << '\t'
               << *DefMI);

  // At this point we have decided that it is legal to do this
  // transformation.  Start by commuting the instruction.
  MachineBasicBlock *MBB = DefMI->getParent();
  MachineInstr *NewMI = tii_->commuteInstruction(DefMI);
  if (!NewMI)
    return false;
  // commuteInstruction may return a fresh instruction; splice it in where the
  // old one was and keep the slot-index maps consistent.
  if (NewMI != DefMI) {
    li_->ReplaceMachineInstrInMaps(DefMI, NewMI);
    MBB->insert(DefMI, NewMI);
    MBB->erase(DefMI);
  }
  unsigned OpIdx = NewMI->findRegisterUseOperandIdx(IntA.reg, false);
  NewMI->getOperand(OpIdx).setIsKill();

  // If ALR and BLR overlaps and end of BLR extends beyond end of ALR, e.g.
  // A = or A, B
  // ...
  // B = A
  // ...
  // C = A<kill>
  // ...
  //   = B

  // Update uses of IntA of the specific Val# with IntB.
  for (MachineRegisterInfo::use_iterator UI = mri_->use_begin(IntA.reg),
         UE = mri_->use_end(); UI != UE;) {
    MachineOperand &UseMO = UI.getOperand();
    MachineInstr *UseMI = &*UI;
    // Advance before mutating the operand, which may invalidate UI.
    ++UI;
    if (JoinedCopies.count(UseMI))
      continue;
    if (UseMI->isDebugValue()) {
      // FIXME These don't have an instruction index.  Not clear we have enough
      // info to decide whether to do this replacement or not.  For now do it.
      UseMO.setReg(NewReg);
      continue;
    }
    SlotIndex UseIdx = li_->getInstructionIndex(UseMI).getUseIndex();
    LiveInterval::iterator ULR = IntA.FindLiveRangeContaining(UseIdx);
    if (ULR == IntA.end() || ULR->valno != AValNo)
      continue;
    if (TargetRegisterInfo::isPhysicalRegister(NewReg))
      UseMO.substPhysReg(NewReg, *tri_);
    else
      UseMO.setReg(NewReg);
    if (UseMI == CopyMI)
      continue;
    if (!UseMI->isCopy())
      continue;
    if (UseMI->getOperand(0).getReg() != IntB.reg ||
        UseMI->getOperand(0).getSubReg())
      continue;

    // This copy will become a noop. If it's defining a new val#, merge it into
    // BValNo.
    SlotIndex DefIdx = UseIdx.getDefIndex();
    VNInfo *DVNI = IntB.getVNInfoAt(DefIdx);
    if (!DVNI)
      continue;
    DEBUG(dbgs() << "\t\tnoop: " << DefIdx << '\t' << *UseMI);
    assert(DVNI->def == DefIdx);
    BValNo = IntB.MergeValueNumberInto(BValNo, DVNI);
    JoinedCopies.insert(UseMI);
  }

  // Extend BValNo by merging in IntA live ranges of AValNo. Val# definition
  // is updated.
  VNInfo *ValNo = BValNo;
  ValNo->def = AValNo->def;
  ValNo->setCopy(0);
  for (LiveInterval::iterator AI = IntA.begin(), AE = IntA.end();
       AI != AE; ++AI) {
    if (AI->valno != AValNo) continue;
    IntB.addRange(LiveRange(AI->start, AI->end, ValNo));
  }
  DEBUG(dbgs() << "\t\textended: " << IntB << '\n');

  // AValNo's ranges now live in IntB; drop the value number from IntA.
  IntA.removeValNo(AValNo);
  DEBUG(dbgs() << "\t\ttrimmed:  " << IntA << '\n');
  ++numCommutes;
  return true;
}
|
|
|
|
|
2009-02-05 09:45:04 +01:00
|
|
|
/// isSameOrFallThroughBB - Return true if MBB == SuccMBB or MBB simply
|
|
|
|
/// fallthoughs to SuccMBB.
|
|
|
|
static bool isSameOrFallThroughBB(MachineBasicBlock *MBB,
|
|
|
|
MachineBasicBlock *SuccMBB,
|
|
|
|
const TargetInstrInfo *tii_) {
|
|
|
|
if (MBB == SuccMBB)
|
|
|
|
return true;
|
|
|
|
MachineBasicBlock *TBB = 0, *FBB = 0;
|
|
|
|
SmallVector<MachineOperand, 4> Cond;
|
|
|
|
return !tii_->AnalyzeBranch(*MBB, TBB, FBB, Cond) && !TBB && !FBB &&
|
|
|
|
MBB->isSuccessor(SuccMBB);
|
|
|
|
}
|
|
|
|
|
|
|
|
/// removeRange - Wrapper for LiveInterval::removeRange. This removes a range
/// from a physical register live interval as well as from the live intervals
/// of its sub-registers.
static void removeRange(LiveInterval &li,
                        SlotIndex Start, SlotIndex End,
                        LiveIntervals *li_, const TargetRegisterInfo *tri_) {
  li.removeRange(Start, End, true);
  if (TargetRegisterInfo::isPhysicalRegister(li.reg)) {
    for (const unsigned* SR = tri_->getSubRegisters(li.reg); *SR; ++SR) {
      if (!li_->hasInterval(*SR))
        continue;
      LiveInterval &sli = li_->getInterval(*SR);
      SlotIndex RemoveStart = Start;
      SlotIndex RemoveEnd = Start;

      // The sub-register interval may cover [Start, End) with several
      // disjoint live ranges; remove them piecewise, clipping each range's
      // end to End, until the whole span is gone or no containing range
      // exists.
      while (RemoveEnd != End) {
        LiveInterval::iterator LR = sli.FindLiveRangeContaining(RemoveStart);
        if (LR == sli.end())
          break;
        RemoveEnd = (LR->end < End) ? LR->end : End;
        sli.removeRange(RemoveStart, RemoveEnd, true);
        RemoveStart = RemoveEnd;
      }
    }
  }
}
|
|
|
|
|
|
|
|
/// TrimLiveIntervalToLastUse - If there is a last use in the same basic block
/// as the copy instruction, trim the live interval to the last use and return
/// true.
bool
SimpleRegisterCoalescing::TrimLiveIntervalToLastUse(SlotIndex CopyIdx,
                                                    MachineBasicBlock *CopyMBB,
                                                    LiveInterval &li,
                                                    const LiveRange *LR) {
  SlotIndex MBBStart = li_->getMBBStartIdx(CopyMBB);
  SlotIndex LastUseIdx;
  // Find the last use of li.reg within LR that is strictly before the copy.
  MachineOperand *LastUse =
    lastRegisterUse(LR->start, CopyIdx.getPrevSlot(), li.reg, LastUseIdx);
  if (LastUse) {
    MachineInstr *LastUseMI = LastUse->getParent();
    if (!isSameOrFallThroughBB(LastUseMI->getParent(), CopyMBB, tii_)) {
      // The last use is in a block that neither is nor falls through to the
      // copy's block:
      // r1024 = op
      // ...
      // BB1:
      // = r1024
      //
      // BB2:
      // r1025<dead> = r1024<kill>
      // Only the portion of the range inside the copy's block can be removed.
      if (MBBStart < LR->end)
        removeRange(li, MBBStart, LR->end, li_, tri_);
      return true;
    }

    // There are uses before the copy, just shorten the live range to the end
    // of last use.
    LastUse->setIsKill();
    removeRange(li, LastUseIdx.getDefIndex(), LR->end, li_, tri_);
    // If the last use is itself a copy whose (non-sub-register) destination
    // is li.reg, that definition is now dead.
    if (LastUseMI->isCopy()) {
      MachineOperand &DefMO = LastUseMI->getOperand(0);
      if (DefMO.getReg() == li.reg && !DefMO.getSubReg())
        DefMO.setIsDead();
    }
    return true;
  }

  // No use before the copy in this range. Is it livein?
  if (LR->start <= MBBStart && LR->end > MBBStart) {
    if (LR->start == li_->getZeroIndex()) {
      assert(TargetRegisterInfo::isPhysicalRegister(li.reg));
      // Live-in to the function but dead. Remove it from entry live-in set.
      mf_->begin()->removeLiveIn(li.reg);
    }
    // FIXME: Shorten intervals in BBs that reaches this BB.
  }

  return false;
}
|
|
|
|
|
2008-08-30 11:09:33 +02:00
|
|
|
/// ReMaterializeTrivialDef - If the source of a copy is defined by a trivial
|
|
|
|
/// computation, replace the copy by rematerialize the definition.
|
|
|
|
bool SimpleRegisterCoalescing::ReMaterializeTrivialDef(LiveInterval &SrcInt,
|
|
|
|
unsigned DstReg,
|
2009-07-16 11:20:10 +02:00
|
|
|
unsigned DstSubIdx,
|
2008-08-30 11:09:33 +02:00
|
|
|
MachineInstr *CopyMI) {
|
2009-11-04 00:52:08 +01:00
|
|
|
SlotIndex CopyIdx = li_->getInstructionIndex(CopyMI).getUseIndex();
|
2008-08-30 11:09:33 +02:00
|
|
|
LiveInterval::iterator SrcLR = SrcInt.FindLiveRangeContaining(CopyIdx);
|
2009-01-13 21:25:24 +01:00
|
|
|
assert(SrcLR != SrcInt.end() && "Live range not found!");
|
2008-08-30 11:09:33 +02:00
|
|
|
VNInfo *ValNo = SrcLR->valno;
|
|
|
|
// If other defs can reach uses of this def, then it's not safe to perform
|
2010-09-25 14:04:16 +02:00
|
|
|
// the optimization.
|
2010-09-26 05:37:09 +02:00
|
|
|
if (ValNo->isPHIDef() || ValNo->isUnused() || ValNo->hasPHIKill())
|
2008-08-30 11:09:33 +02:00
|
|
|
return false;
|
|
|
|
MachineInstr *DefMI = li_->getInstructionFromIndex(ValNo->def);
|
2010-09-26 05:37:09 +02:00
|
|
|
if (!DefMI)
|
|
|
|
return false;
|
2010-06-24 20:15:01 +02:00
|
|
|
assert(DefMI && "Defining instruction disappeared");
|
2008-08-30 11:09:33 +02:00
|
|
|
const TargetInstrDesc &TID = DefMI->getDesc();
|
|
|
|
if (!TID.isAsCheapAsAMove())
|
|
|
|
return false;
|
2009-10-10 01:27:56 +02:00
|
|
|
if (!tii_->isTriviallyReMaterializable(DefMI, AA))
|
2009-02-05 23:24:17 +01:00
|
|
|
return false;
|
2008-08-30 11:09:33 +02:00
|
|
|
bool SawStore = false;
|
2010-03-02 20:03:01 +01:00
|
|
|
if (!DefMI->isSafeToMove(tii_, AA, SawStore))
|
2008-08-30 11:09:33 +02:00
|
|
|
return false;
|
2009-07-14 02:51:06 +02:00
|
|
|
if (TID.getNumDefs() != 1)
|
|
|
|
return false;
|
2010-02-09 20:54:29 +01:00
|
|
|
if (!DefMI->isImplicitDef()) {
|
2009-07-20 21:47:55 +02:00
|
|
|
// Make sure the copy destination register class fits the instruction
|
|
|
|
// definition register class. The mismatch can happen as a result of earlier
|
|
|
|
// extract_subreg, insert_subreg, subreg_to_reg coalescing.
|
2009-07-29 23:36:49 +02:00
|
|
|
const TargetRegisterClass *RC = TID.OpInfo[0].getRegClass(tri_);
|
2009-07-20 21:47:55 +02:00
|
|
|
if (TargetRegisterInfo::isVirtualRegister(DstReg)) {
|
|
|
|
if (mri_->getRegClass(DstReg) != RC)
|
|
|
|
return false;
|
|
|
|
} else if (!RC->contains(DstReg))
|
2009-07-14 02:51:06 +02:00
|
|
|
return false;
|
2009-07-20 21:47:55 +02:00
|
|
|
}
|
2008-08-30 11:09:33 +02:00
|
|
|
|
2009-09-08 08:39:07 +02:00
|
|
|
// If destination register has a sub-register index on it, make sure it mtches
|
|
|
|
// the instruction register class.
|
|
|
|
if (DstSubIdx) {
|
|
|
|
const TargetInstrDesc &TID = DefMI->getDesc();
|
|
|
|
if (TID.getNumDefs() != 1)
|
|
|
|
return false;
|
|
|
|
const TargetRegisterClass *DstRC = mri_->getRegClass(DstReg);
|
|
|
|
const TargetRegisterClass *DstSubRC =
|
|
|
|
DstRC->getSubRegisterRegClass(DstSubIdx);
|
|
|
|
const TargetRegisterClass *DefRC = TID.OpInfo[0].getRegClass(tri_);
|
|
|
|
if (DefRC == DstRC)
|
|
|
|
DstSubIdx = 0;
|
|
|
|
else if (DefRC != DstSubRC)
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2010-06-24 20:15:01 +02:00
|
|
|
RemoveCopyFlag(DstReg, CopyMI);
|
2008-08-30 11:09:33 +02:00
|
|
|
|
2009-02-05 09:45:04 +01:00
|
|
|
// If copy kills the source register, find the last use and propagate
|
|
|
|
// kill.
|
2009-05-12 01:14:13 +02:00
|
|
|
bool checkForDeadDef = false;
|
2008-08-30 11:09:33 +02:00
|
|
|
MachineBasicBlock *MBB = CopyMI->getParent();
|
2010-06-26 00:53:05 +02:00
|
|
|
if (SrcLR->end == CopyIdx.getDefIndex())
|
2009-05-12 01:14:13 +02:00
|
|
|
if (!TrimLiveIntervalToLastUse(CopyIdx, MBB, SrcInt, SrcLR)) {
|
|
|
|
checkForDeadDef = true;
|
|
|
|
}
|
2009-02-05 09:45:04 +01:00
|
|
|
|
2009-12-03 01:50:42 +01:00
|
|
|
MachineBasicBlock::iterator MII =
|
|
|
|
llvm::next(MachineBasicBlock::iterator(CopyMI));
|
2010-06-03 00:47:25 +02:00
|
|
|
tii_->reMaterialize(*MBB, MII, DstReg, DstSubIdx, DefMI, *tri_);
|
2008-08-30 11:09:33 +02:00
|
|
|
MachineInstr *NewMI = prior(MII);
|
2009-05-12 01:14:13 +02:00
|
|
|
|
|
|
|
if (checkForDeadDef) {
|
2009-06-16 09:12:58 +02:00
|
|
|
// PR4090 fix: Trim interval failed because there was no use of the
|
|
|
|
// source interval in this MBB. If the def is in this MBB too then we
|
|
|
|
// should mark it dead:
|
|
|
|
if (DefMI->getParent() == MBB) {
|
|
|
|
DefMI->addRegisterDead(SrcInt.reg, tri_);
|
2009-11-04 00:52:08 +01:00
|
|
|
SrcLR->end = SrcLR->start.getNextSlot();
|
2009-06-16 09:12:58 +02:00
|
|
|
}
|
2009-05-12 01:14:13 +02:00
|
|
|
}
|
|
|
|
|
2008-10-12 01:59:03 +02:00
|
|
|
// CopyMI may have implicit operands, transfer them over to the newly
|
2008-08-30 11:09:33 +02:00
|
|
|
// rematerialized instruction. And update implicit def interval valnos.
|
|
|
|
for (unsigned i = CopyMI->getDesc().getNumOperands(),
|
|
|
|
e = CopyMI->getNumOperands(); i != e; ++i) {
|
|
|
|
MachineOperand &MO = CopyMI->getOperand(i);
|
2008-10-03 17:45:36 +02:00
|
|
|
if (MO.isReg() && MO.isImplicit())
|
2008-08-30 11:09:33 +02:00
|
|
|
NewMI->addOperand(MO);
|
2010-06-24 20:15:01 +02:00
|
|
|
if (MO.isDef())
|
|
|
|
RemoveCopyFlag(MO.getReg(), CopyMI);
|
2008-08-30 11:09:33 +02:00
|
|
|
}
|
|
|
|
|
2010-10-22 23:29:58 +02:00
|
|
|
NewMI->copyImplicitOps(CopyMI);
|
2008-08-30 11:09:33 +02:00
|
|
|
li_->ReplaceMachineInstrInMaps(CopyMI, NewMI);
|
2009-06-16 09:12:58 +02:00
|
|
|
CopyMI->eraseFromParent();
|
2008-08-30 11:09:33 +02:00
|
|
|
ReMatCopies.insert(CopyMI);
|
2008-09-19 19:38:47 +02:00
|
|
|
ReMatDefs.insert(DefMI);
|
2010-02-23 23:44:02 +01:00
|
|
|
DEBUG(dbgs() << "Remat: " << *NewMI);
|
2008-08-30 11:09:33 +02:00
|
|
|
++NumReMats;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2008-02-15 19:24:29 +01:00
|
|
|
/// UpdateRegDefsUses - Replace all defs and uses of SrcReg to DstReg and
/// update the subregister number if it is not zero. If DstReg is a
/// physical register and the existing subregister number of the def / use
/// being updated is not zero, make sure to set it to the correct physical
/// subregister.
void
SimpleRegisterCoalescing::UpdateRegDefsUses(const CoalescerPair &CP) {
  bool DstIsPhys = CP.isPhys();
  unsigned SrcReg = CP.getSrcReg();
  unsigned DstReg = CP.getDstReg();
  unsigned SubIdx = CP.getSubIdx();

  // skipInstruction() advances past all operands of the current instruction,
  // so the loop is safe against operand rewrites invalidating the iterator.
  for (MachineRegisterInfo::reg_iterator I = mri_->reg_begin(SrcReg);
       MachineInstr *UseMI = I.skipInstruction();) {
    // A PhysReg copy that won't be coalesced can perhaps be rematerialized
    // instead.
    if (DstIsPhys) {
      if (UseMI->isCopy() &&
          !UseMI->getOperand(1).getSubReg() &&
          !UseMI->getOperand(0).getSubReg() &&
          UseMI->getOperand(1).getReg() == SrcReg &&
          UseMI->getOperand(0).getReg() != SrcReg &&
          UseMI->getOperand(0).getReg() != DstReg &&
          !JoinedCopies.count(UseMI) &&
          ReMaterializeTrivialDef(li_->getInterval(SrcReg),
                                  UseMI->getOperand(0).getReg(), 0, UseMI))
        continue; // UseMI was erased by rematerialization.
    }

    SmallVector<unsigned,8> Ops;
    bool Reads, Writes;
    tie(Reads, Writes) = UseMI->readsWritesVirtualRegister(SrcReg, &Ops);
    bool Kills = false, Deads = false;

    // Replace SrcReg with DstReg in all UseMI operands.
    for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
      MachineOperand &MO = UseMI->getOperand(Ops[i]);
      // Remember flags so they can be transferred to DstReg below.
      Kills |= MO.isKill();
      Deads |= MO.isDead();

      if (DstIsPhys)
        MO.substPhysReg(DstReg, *tri_);
      else
        MO.substVirtReg(DstReg, SubIdx, *tri_);
    }

    // This instruction is a copy that will be removed.
    if (JoinedCopies.count(UseMI))
      continue;

    if (SubIdx) {
      // If UseMI was a simple SrcReg def, make sure we didn't turn it into a
      // read-modify-write of DstReg.
      if (Deads)
        UseMI->addRegisterDead(DstReg, tri_);
      else if (!Reads && Writes)
        UseMI->addRegisterDefined(DstReg, tri_);

      // Kill flags apply to the whole physical register.
      if (DstIsPhys && Kills)
        UseMI->addRegisterKilled(DstReg, tri_);
    }

    DEBUG({
        dbgs() << "\t\tupdated: ";
        if (!UseMI->isDebugValue())
          dbgs() << li_->getInstructionIndex(UseMI) << "\t";
        dbgs() << *UseMI;
      });
  }
}
|
|
|
|
|
2008-03-18 09:26:47 +01:00
|
|
|
/// removeIntervalIfEmpty - Check if the live interval of a physical register
|
|
|
|
/// is empty, if so remove it and also remove the empty intervals of its
|
2008-04-16 22:24:25 +02:00
|
|
|
/// sub-registers. Return true if live interval is removed.
|
|
|
|
static bool removeIntervalIfEmpty(LiveInterval &li, LiveIntervals *li_,
|
2008-03-18 09:26:47 +01:00
|
|
|
const TargetRegisterInfo *tri_) {
|
|
|
|
if (li.empty()) {
|
|
|
|
if (TargetRegisterInfo::isPhysicalRegister(li.reg))
|
|
|
|
for (const unsigned* SR = tri_->getSubRegisters(li.reg); *SR; ++SR) {
|
|
|
|
if (!li_->hasInterval(*SR))
|
|
|
|
continue;
|
|
|
|
LiveInterval &sli = li_->getInterval(*SR);
|
|
|
|
if (sli.empty())
|
|
|
|
li_->removeInterval(*SR);
|
|
|
|
}
|
2008-04-16 03:22:28 +02:00
|
|
|
li_->removeInterval(li.reg);
|
2008-04-16 22:24:25 +02:00
|
|
|
return true;
|
2008-03-18 09:26:47 +01:00
|
|
|
}
|
2008-04-16 22:24:25 +02:00
|
|
|
return false;
|
2008-03-18 09:26:47 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/// ShortenDeadCopyLiveRange - Shorten a live range defined by a dead copy.
|
2008-04-16 22:24:25 +02:00
|
|
|
/// Return true if live interval is removed.
|
|
|
|
bool SimpleRegisterCoalescing::ShortenDeadCopyLiveRange(LiveInterval &li,
|
2008-03-05 23:09:42 +01:00
|
|
|
MachineInstr *CopyMI) {
|
2009-11-04 00:52:08 +01:00
|
|
|
SlotIndex CopyIdx = li_->getInstructionIndex(CopyMI);
|
2008-03-05 23:09:42 +01:00
|
|
|
LiveInterval::iterator MLR =
|
2009-11-04 00:52:08 +01:00
|
|
|
li.FindLiveRangeContaining(CopyIdx.getDefIndex());
|
2008-03-18 09:26:47 +01:00
|
|
|
if (MLR == li.end())
|
2008-04-16 22:24:25 +02:00
|
|
|
return false; // Already removed by ShortenDeadCopySrcLiveRange.
|
2009-11-04 00:52:08 +01:00
|
|
|
SlotIndex RemoveStart = MLR->start;
|
|
|
|
SlotIndex RemoveEnd = MLR->end;
|
|
|
|
SlotIndex DefIdx = CopyIdx.getDefIndex();
|
2008-03-18 09:26:47 +01:00
|
|
|
// Remove the liverange that's defined by this.
|
2009-11-04 00:52:08 +01:00
|
|
|
if (RemoveStart == DefIdx && RemoveEnd == DefIdx.getStoreIndex()) {
|
2008-03-18 09:26:47 +01:00
|
|
|
removeRange(li, RemoveStart, RemoveEnd, li_, tri_);
|
2008-04-16 22:24:25 +02:00
|
|
|
return removeIntervalIfEmpty(li, li_, tri_);
|
2008-03-18 09:26:47 +01:00
|
|
|
}
|
2008-04-16 22:24:25 +02:00
|
|
|
return false;
|
2008-03-18 09:26:47 +01:00
|
|
|
}
|
|
|
|
|
2008-10-28 00:21:01 +01:00
|
|
|
/// RemoveDeadDef - If a def of a live interval is now determined dead, remove
|
|
|
|
/// the val# it defines. If the live interval becomes empty, remove it as well.
|
|
|
|
bool SimpleRegisterCoalescing::RemoveDeadDef(LiveInterval &li,
|
|
|
|
MachineInstr *DefMI) {
|
2009-11-04 00:52:08 +01:00
|
|
|
SlotIndex DefIdx = li_->getInstructionIndex(DefMI).getDefIndex();
|
2008-10-28 00:21:01 +01:00
|
|
|
LiveInterval::iterator MLR = li.FindLiveRangeContaining(DefIdx);
|
|
|
|
if (DefIdx != MLR->valno->def)
|
|
|
|
return false;
|
|
|
|
li.removeValNo(MLR->valno);
|
|
|
|
return removeIntervalIfEmpty(li, li_, tri_);
|
|
|
|
}
|
|
|
|
|
2010-06-24 20:15:01 +02:00
|
|
|
void SimpleRegisterCoalescing::RemoveCopyFlag(unsigned DstReg,
|
|
|
|
const MachineInstr *CopyMI) {
|
|
|
|
SlotIndex DefIdx = li_->getInstructionIndex(CopyMI).getDefIndex();
|
|
|
|
if (li_->hasInterval(DstReg)) {
|
|
|
|
LiveInterval &LI = li_->getInterval(DstReg);
|
|
|
|
if (const LiveRange *LR = LI.getLiveRangeContaining(DefIdx))
|
2010-09-25 20:10:38 +02:00
|
|
|
if (LR->valno->def == DefIdx)
|
2010-06-24 20:15:01 +02:00
|
|
|
LR->valno->setCopy(0);
|
|
|
|
}
|
|
|
|
if (!TargetRegisterInfo::isPhysicalRegister(DstReg))
|
|
|
|
return;
|
|
|
|
for (const unsigned* AS = tri_->getAliasSet(DstReg); *AS; ++AS) {
|
|
|
|
if (!li_->hasInterval(*AS))
|
|
|
|
continue;
|
|
|
|
LiveInterval &LI = li_->getInterval(*AS);
|
|
|
|
if (const LiveRange *LR = LI.getLiveRangeContaining(DefIdx))
|
2010-09-25 20:10:38 +02:00
|
|
|
if (LR->valno->def == DefIdx)
|
2010-06-24 20:15:01 +02:00
|
|
|
LR->valno->setCopy(0);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2008-03-26 21:15:49 +01:00
|
|
|
/// PropagateDeadness - Propagate the dead marker to the instruction which
|
|
|
|
/// defines the val#.
|
|
|
|
static void PropagateDeadness(LiveInterval &li, MachineInstr *CopyMI,
|
2009-11-04 00:52:08 +01:00
|
|
|
SlotIndex &LRStart, LiveIntervals *li_,
|
2008-03-26 21:15:49 +01:00
|
|
|
const TargetRegisterInfo* tri_) {
|
|
|
|
MachineInstr *DefMI =
|
2009-11-04 00:52:08 +01:00
|
|
|
li_->getInstructionFromIndex(LRStart.getDefIndex());
|
2008-03-26 21:15:49 +01:00
|
|
|
if (DefMI && DefMI != CopyMI) {
|
2010-05-21 22:53:24 +02:00
|
|
|
int DeadIdx = DefMI->findRegisterDefOperandIdx(li.reg);
|
2009-08-07 09:14:14 +02:00
|
|
|
if (DeadIdx != -1)
|
2008-03-26 21:15:49 +01:00
|
|
|
DefMI->getOperand(DeadIdx).setIsDead();
|
2009-08-07 09:14:14 +02:00
|
|
|
else
|
|
|
|
DefMI->addOperand(MachineOperand::CreateReg(li.reg,
|
2009-10-25 08:48:51 +01:00
|
|
|
/*def*/true, /*implicit*/true, /*kill*/false, /*dead*/true));
|
2009-11-04 00:52:08 +01:00
|
|
|
LRStart = LRStart.getNextSlot();
|
2008-03-26 21:15:49 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2008-04-17 07:20:39 +02:00
|
|
|
/// ShortenDeadCopySrcLiveRange - Shorten a live range as it's artificially
/// extended by a dead copy. Mark the last use (if any) of the val# as kill as
/// ends the live range there. If there isn't another use, then this live range
/// is dead. Return true if live interval is removed.
bool
SimpleRegisterCoalescing::ShortenDeadCopySrcLiveRange(LiveInterval &li,
                                                      MachineInstr *CopyMI) {
  SlotIndex CopyIdx = li_->getInstructionIndex(CopyMI);
  if (CopyIdx == SlotIndex()) {
    // FIXME: special case: function live in. It can be a general case if the
    // first instruction index starts at > 0 value.
    assert(TargetRegisterInfo::isPhysicalRegister(li.reg));
    // Live-in to the function but dead. Remove it from entry live-in set.
    if (mf_->begin()->isLiveIn(li.reg))
      mf_->begin()->removeLiveIn(li.reg);
    if (const LiveRange *LR = li.getLiveRangeContaining(CopyIdx))
      removeRange(li, LR->start, LR->end, li_, tri_);
    return removeIntervalIfEmpty(li, li_, tri_);
  }

  // Find the live range that reaches the slot just before the copy.
  LiveInterval::iterator LR =
    li.FindLiveRangeContaining(CopyIdx.getPrevIndex().getStoreIndex());
  if (LR == li.end())
    // Livein but defined by a phi.
    return false;

  SlotIndex RemoveStart = LR->start;
  SlotIndex RemoveEnd = CopyIdx.getStoreIndex();
  if (LR->end > RemoveEnd)
    // More uses past this copy? Nothing to do.
    return false;

  // If there is a last use in the same bb, we can't remove the live range.
  // Shorten the live interval and return.
  MachineBasicBlock *CopyMBB = CopyMI->getParent();
  if (TrimLiveIntervalToLastUse(CopyIdx, CopyMBB, li, LR))
    return false;

  // There are other kills of the val#. Nothing to do.
  if (!li.isOnlyLROfValNo(LR))
    return false;

  MachineBasicBlock *StartMBB = li_->getMBBFromIndex(RemoveStart);
  if (!isSameOrFallThroughBB(StartMBB, CopyMBB, tii_))
    // If the live range starts in another mbb and the copy mbb is not a fall
    // through mbb, then we can only cut the range from the beginning of the
    // copy mbb.
    RemoveStart = li_->getMBBStartIdx(CopyMBB).getNextIndex().getBaseIndex();

  if (LR->valno->def == RemoveStart) {
    // If the def MI defines the val# and this copy is the only kill of the
    // val#, then propagate the dead marker.
    // (PropagateDeadness also bumps RemoveStart past the dead def slot.)
    PropagateDeadness(li, CopyMI, RemoveStart, li_, tri_);
    ++numDeadValNo;
  }

  removeRange(li, RemoveStart, RemoveEnd, li_, tri_);
  return removeIntervalIfEmpty(li, li_, tri_);
}
|
|
|
|
|
2008-04-09 22:57:25 +02:00
|
|
|
|
2009-01-23 03:15:19 +01:00
|
|
|
/// isWinToJoinCrossClass - Return true if it's profitable to coalesce
|
|
|
|
/// two virtual registers from different register classes.
|
2008-06-19 03:39:21 +02:00
|
|
|
bool
|
2010-04-21 02:44:22 +02:00
|
|
|
SimpleRegisterCoalescing::isWinToJoinCrossClass(unsigned SrcReg,
|
|
|
|
unsigned DstReg,
|
|
|
|
const TargetRegisterClass *SrcRC,
|
|
|
|
const TargetRegisterClass *DstRC,
|
|
|
|
const TargetRegisterClass *NewRC) {
|
|
|
|
unsigned NewRCCount = allocatableRCRegs_[NewRC].count();
|
|
|
|
// This heuristics is good enough in practice, but it's obviously not *right*.
|
|
|
|
// 4 is a magic number that works well enough for x86, ARM, etc. It filter
|
|
|
|
// out all but the most restrictive register classes.
|
|
|
|
if (NewRCCount > 4 ||
|
|
|
|
// Early exit if the function is fairly small, coalesce aggressively if
|
|
|
|
// that's the case. For really special register classes with 3 or
|
|
|
|
// fewer registers, be a bit more careful.
|
|
|
|
(li_->getFuncInstructionCount() / NewRCCount) < 8)
|
|
|
|
return true;
|
|
|
|
LiveInterval &SrcInt = li_->getInterval(SrcReg);
|
|
|
|
LiveInterval &DstInt = li_->getInterval(DstReg);
|
|
|
|
unsigned SrcSize = li_->getApproximateInstructionCount(SrcInt);
|
|
|
|
unsigned DstSize = li_->getApproximateInstructionCount(DstInt);
|
|
|
|
if (SrcSize <= NewRCCount && DstSize <= NewRCCount)
|
|
|
|
return true;
|
|
|
|
// Estimate *register use density*. If it doubles or more, abort.
|
|
|
|
unsigned SrcUses = std::distance(mri_->use_nodbg_begin(SrcReg),
|
|
|
|
mri_->use_nodbg_end());
|
|
|
|
unsigned DstUses = std::distance(mri_->use_nodbg_begin(DstReg),
|
|
|
|
mri_->use_nodbg_end());
|
2010-04-30 20:28:11 +02:00
|
|
|
unsigned NewUses = SrcUses + DstUses;
|
|
|
|
unsigned NewSize = SrcSize + DstSize;
|
2010-04-21 02:44:22 +02:00
|
|
|
if (SrcRC != NewRC && SrcSize > NewRCCount) {
|
|
|
|
unsigned SrcRCCount = allocatableRCRegs_[SrcRC].count();
|
2010-04-30 20:28:11 +02:00
|
|
|
if (NewUses*SrcSize*SrcRCCount > 2*SrcUses*NewSize*NewRCCount)
|
2010-04-21 02:44:22 +02:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
if (DstRC != NewRC && DstSize > NewRCCount) {
|
|
|
|
unsigned DstRCCount = allocatableRCRegs_[DstRC].count();
|
2010-04-30 20:28:11 +02:00
|
|
|
if (NewUses*DstSize*DstRCCount > 2*DstUses*NewSize*NewRCCount)
|
2009-01-23 03:15:19 +01:00
|
|
|
return false;
|
2010-02-11 21:58:45 +01:00
|
|
|
}
|
2009-01-23 03:15:19 +01:00
|
|
|
return true;
|
2008-06-19 03:39:21 +02:00
|
|
|
}
|
|
|
|
|
2010-06-24 02:52:22 +02:00
|
|
|
|
2007-06-08 19:18:56 +02:00
|
|
|
/// JoinCopy - Attempt to join intervals corresponding to SrcReg/DstReg,
|
|
|
|
/// which are the src/dst of the copy instruction CopyMI. This returns true
|
2007-11-01 07:22:48 +01:00
|
|
|
/// if the copy was successfully coalesced away. If it is not currently
|
|
|
|
/// possible to coalesce this interval, but it may be possible if other
|
|
|
|
/// things get coalesced, then it returns true by reference in 'Again'.
|
2008-02-13 04:01:43 +01:00
|
|
|
bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) {
|
2007-11-06 09:52:21 +01:00
|
|
|
MachineInstr *CopyMI = TheCopy.MI;
|
|
|
|
|
|
|
|
Again = false;
|
2008-08-30 11:09:33 +02:00
|
|
|
if (JoinedCopies.count(CopyMI) || ReMatCopies.count(CopyMI))
|
2007-11-06 09:52:21 +01:00
|
|
|
return false; // Already done.
|
|
|
|
|
2010-01-05 02:25:58 +01:00
|
|
|
DEBUG(dbgs() << li_->getInstructionIndex(CopyMI) << '\t' << *CopyMI);
|
2007-06-08 19:18:56 +02:00
|
|
|
|
2010-06-15 18:04:21 +02:00
|
|
|
CoalescerPair CP(*tii_, *tri_);
|
|
|
|
if (!CP.setRegisters(CopyMI)) {
|
|
|
|
DEBUG(dbgs() << "\tNot coalescable.\n");
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2010-06-24 20:15:01 +02:00
|
|
|
// If they are already joined we continue.
|
|
|
|
if (CP.getSrcReg() == CP.getDstReg()) {
|
|
|
|
DEBUG(dbgs() << "\tCopy already coalesced.\n");
|
2010-06-24 02:52:22 +02:00
|
|
|
return false; // Not coalescable.
|
|
|
|
}
|
2010-04-30 01:47:46 +02:00
|
|
|
|
2010-08-31 03:27:49 +02:00
|
|
|
if (DisablePhysicalJoin && CP.isPhys()) {
|
|
|
|
DEBUG(dbgs() << "\tPhysical joins disabled.\n");
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2010-06-24 20:15:01 +02:00
|
|
|
DEBUG(dbgs() << "\tConsidering merging %reg" << CP.getSrcReg());
|
2010-06-24 02:52:22 +02:00
|
|
|
|
2010-06-24 20:15:01 +02:00
|
|
|
// Enforce policies.
|
|
|
|
if (CP.isPhys()) {
|
|
|
|
DEBUG(dbgs() <<" with physreg %" << tri_->getName(CP.getDstReg()) << "\n");
|
|
|
|
// Only coalesce to allocatable physreg.
|
2010-08-10 02:02:26 +02:00
|
|
|
if (!li_->isAllocatable(CP.getDstReg())) {
|
2010-06-24 20:15:01 +02:00
|
|
|
DEBUG(dbgs() << "\tRegister is an unallocatable physreg.\n");
|
|
|
|
return false; // Not coalescable.
|
2010-06-24 02:52:22 +02:00
|
|
|
}
|
2010-06-24 20:15:01 +02:00
|
|
|
} else {
|
|
|
|
DEBUG({
|
|
|
|
dbgs() << " with reg%" << CP.getDstReg();
|
|
|
|
if (CP.getSubIdx())
|
|
|
|
dbgs() << ":" << tri_->getSubRegIndexName(CP.getSubIdx());
|
|
|
|
dbgs() << " to " << CP.getNewRC()->getName() << "\n";
|
|
|
|
});
|
2009-07-20 21:47:55 +02:00
|
|
|
|
2010-06-24 20:15:01 +02:00
|
|
|
// Avoid constraining virtual register regclass too much.
|
|
|
|
if (CP.isCrossClass()) {
|
|
|
|
if (DisableCrossClassJoin) {
|
|
|
|
DEBUG(dbgs() << "\tCross-class joins disabled.\n");
|
Implement support for using modeling implicit-zero-extension on x86-64
with SUBREG_TO_REG, teach SimpleRegisterCoalescing to coalesce
SUBREG_TO_REG instructions (which are similar to INSERT_SUBREG
instructions), and teach the DAGCombiner to take advantage of this on
targets which support it. This eliminates many redundant
zero-extension operations on x86-64.
This adds a new TargetLowering hook, isZExtFree. It's similar to
isTruncateFree, except it only applies to actual definitions, and not
no-op truncates which may not zero the high bits.
Also, this adds a new optimization to SimplifyDemandedBits: transform
operations like x+y into (zext (add (trunc x), (trunc y))) on targets
where all the casts are no-ops. In contexts where the high part of the
add is explicitly masked off, this allows the mask operation to be
eliminated. Fix the DAGCombiner to avoid undoing these transformations
to eliminate casts on targets where the casts are no-ops.
Also, this adds a new two-address lowering heuristic. Since
two-address lowering runs before coalescing, it helps to be able to
look through copies when deciding whether commuting and/or
three-address conversion are profitable.
Also, fix a bug in LiveInterval::MergeInClobberRanges. It didn't handle
the case that a clobber range extended both before and beyond an
existing live range. In that case, multiple live ranges need to be
added. This was exposed by the new subreg coalescing code.
Remove 2008-05-06-SpillerBug.ll. It was bugpoint-reduced, and the
spiller behavior it was looking for no longer occurrs with the new
instruction selection.
llvm-svn: 68576
2009-04-08 02:15:30 +02:00
|
|
|
return false;
|
2010-06-24 02:52:22 +02:00
|
|
|
}
|
2010-06-24 20:15:01 +02:00
|
|
|
if (!isWinToJoinCrossClass(CP.getSrcReg(), CP.getDstReg(),
|
|
|
|
mri_->getRegClass(CP.getSrcReg()),
|
|
|
|
mri_->getRegClass(CP.getDstReg()),
|
|
|
|
CP.getNewRC())) {
|
2010-04-30 01:25:34 +02:00
|
|
|
DEBUG(dbgs() << "\tAvoid coalescing to constrained register class: "
|
2010-06-24 20:15:01 +02:00
|
|
|
<< CP.getNewRC()->getName() << ".\n");
|
2010-04-21 02:44:22 +02:00
|
|
|
Again = true; // May be possible to coalesce later.
|
|
|
|
return false;
|
|
|
|
}
|
2008-06-19 03:39:21 +02:00
|
|
|
}
|
2010-03-18 01:23:47 +01:00
|
|
|
|
2010-06-24 20:15:01 +02:00
|
|
|
// When possible, let DstReg be the larger interval.
|
|
|
|
if (!CP.getSubIdx() && li_->getInterval(CP.getSrcReg()).ranges.size() >
|
|
|
|
li_->getInterval(CP.getDstReg()).ranges.size())
|
|
|
|
CP.flip();
|
|
|
|
}
|
|
|
|
|
|
|
|
// We need to be careful about coalescing a source physical register with a
|
|
|
|
// virtual register. Once the coalescing is done, it cannot be broken and
|
|
|
|
// these are not spillable! If the destination interval uses are far away,
|
|
|
|
// think twice about coalescing them!
|
|
|
|
// FIXME: Why are we skipping this test for partial copies?
|
|
|
|
// CodeGen/X86/phys_subreg_coalesce-3.ll needs it.
|
|
|
|
if (!CP.isPartial() && CP.isPhys()) {
|
|
|
|
LiveInterval &JoinVInt = li_->getInterval(CP.getSrcReg());
|
|
|
|
|
|
|
|
// Don't join with physregs that have a ridiculous number of live
|
|
|
|
// ranges. The data structure performance is really bad when that
|
|
|
|
// happens.
|
|
|
|
if (li_->hasInterval(CP.getDstReg()) &&
|
|
|
|
li_->getInterval(CP.getDstReg()).ranges.size() > 1000) {
|
|
|
|
++numAborts;
|
|
|
|
DEBUG(dbgs()
|
|
|
|
<< "\tPhysical register live interval too complicated, abort!\n");
|
|
|
|
return false;
|
|
|
|
}
|
2007-06-08 19:18:56 +02:00
|
|
|
|
2010-06-24 20:15:01 +02:00
|
|
|
const TargetRegisterClass *RC = mri_->getRegClass(CP.getSrcReg());
|
|
|
|
unsigned Threshold = allocatableRCRegs_[RC].count() * 2;
|
|
|
|
unsigned Length = li_->getApproximateInstructionCount(JoinVInt);
|
|
|
|
if (Length > Threshold &&
|
|
|
|
std::distance(mri_->use_nodbg_begin(CP.getSrcReg()),
|
|
|
|
mri_->use_nodbg_end()) * Threshold < Length) {
|
|
|
|
// Before giving up coalescing, if definition of source is defined by
|
|
|
|
// trivial computation, try rematerializing it.
|
|
|
|
if (!CP.isFlipped() &&
|
|
|
|
ReMaterializeTrivialDef(JoinVInt, CP.getDstReg(), 0, CopyMI))
|
|
|
|
return true;
|
2010-06-24 02:52:22 +02:00
|
|
|
|
2010-06-24 20:15:01 +02:00
|
|
|
++numAborts;
|
|
|
|
DEBUG(dbgs() << "\tMay tie down a physical register, abort!\n");
|
|
|
|
Again = true; // May be possible to coalesce later.
|
|
|
|
return false;
|
2010-06-24 02:52:22 +02:00
|
|
|
}
|
|
|
|
}
|
2010-06-24 02:12:39 +02:00
|
|
|
|
2007-06-08 19:18:56 +02:00
|
|
|
// Okay, attempt to join these two intervals. On failure, this returns false.
|
|
|
|
// Otherwise, if one of the intervals being joined is a physreg, this method
|
|
|
|
// always canonicalizes DstInt to be it. The output "SrcInt" will not have
|
|
|
|
// been modified, so we can use this information below to update aliases.
|
2010-06-24 20:15:01 +02:00
|
|
|
if (!JoinIntervals(CP)) {
|
2007-07-09 14:00:59 +02:00
|
|
|
// Coalescing failed.
|
2008-08-30 11:09:33 +02:00
|
|
|
|
|
|
|
// If definition of source is defined by trivial computation, try
|
|
|
|
// rematerializing it.
|
2010-06-24 20:15:01 +02:00
|
|
|
if (!CP.isFlipped() &&
|
|
|
|
ReMaterializeTrivialDef(li_->getInterval(CP.getSrcReg()),
|
|
|
|
CP.getDstReg(), 0, CopyMI))
|
2008-08-30 11:09:33 +02:00
|
|
|
return true;
|
2009-09-20 04:20:51 +02:00
|
|
|
|
2007-06-08 19:18:56 +02:00
|
|
|
// If we can eliminate the copy without merging the live ranges, do so now.
|
2010-06-24 20:15:01 +02:00
|
|
|
if (!CP.isPartial()) {
|
2010-06-24 22:16:00 +02:00
|
|
|
if (AdjustCopiesBackFrom(CP, CopyMI) ||
|
2010-07-06 22:31:51 +02:00
|
|
|
RemoveCopyByCommutingDef(CP, CopyMI)) {
|
2010-06-24 20:15:01 +02:00
|
|
|
JoinedCopies.insert(CopyMI);
|
|
|
|
DEBUG(dbgs() << "\tTrivial!\n");
|
|
|
|
return true;
|
|
|
|
}
|
2007-11-06 09:52:21 +01:00
|
|
|
}
|
2009-09-20 04:20:51 +02:00
|
|
|
|
2007-06-08 19:18:56 +02:00
|
|
|
// Otherwise, we are unable to join the intervals.
|
2010-04-30 00:21:48 +02:00
|
|
|
DEBUG(dbgs() << "\tInterference!\n");
|
2007-11-01 07:22:48 +01:00
|
|
|
Again = true; // May be possible to coalesce later.
|
2007-06-08 19:18:56 +02:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2008-06-19 03:39:21 +02:00
|
|
|
// Coalescing to a virtual register that is of a sub-register class of the
|
|
|
|
// other. Make sure the resulting register is set to the right register class.
|
2010-06-24 20:15:01 +02:00
|
|
|
if (CP.isCrossClass()) {
|
2009-07-18 04:10:10 +02:00
|
|
|
++numCrossRCs;
|
2010-06-24 20:15:01 +02:00
|
|
|
mri_->setRegClass(CP.getDstReg(), CP.getNewRC());
|
|
|
|
}
|
2008-06-19 03:39:21 +02:00
|
|
|
|
2008-02-15 19:24:29 +01:00
|
|
|
// Remember to delete the copy instruction.
|
|
|
|
JoinedCopies.insert(CopyMI);
|
|
|
|
|
2010-06-24 20:15:01 +02:00
|
|
|
UpdateRegDefsUses(CP);
|
2007-06-08 19:18:56 +02:00
|
|
|
|
2010-02-09 18:20:11 +01:00
|
|
|
// If we have extended the live range of a physical register, make sure we
|
|
|
|
// update live-in lists as well.
|
2010-06-24 20:15:01 +02:00
|
|
|
if (CP.isPhys()) {
|
2010-02-09 18:20:11 +01:00
|
|
|
SmallVector<MachineBasicBlock*, 16> BlockSeq;
|
2010-06-24 20:15:01 +02:00
|
|
|
// JoinIntervals invalidates the VNInfos in SrcInt, but we only need the
|
|
|
|
// ranges for this, and they are preserved.
|
|
|
|
LiveInterval &SrcInt = li_->getInterval(CP.getSrcReg());
|
|
|
|
for (LiveInterval::const_iterator I = SrcInt.begin(), E = SrcInt.end();
|
|
|
|
I != E; ++I ) {
|
2010-02-09 18:20:11 +01:00
|
|
|
li_->findLiveInMBBs(I->start, I->end, BlockSeq);
|
|
|
|
for (unsigned idx = 0, size = BlockSeq.size(); idx != size; ++idx) {
|
|
|
|
MachineBasicBlock &block = *BlockSeq[idx];
|
2010-06-24 20:15:01 +02:00
|
|
|
if (!block.isLiveIn(CP.getDstReg()))
|
|
|
|
block.addLiveIn(CP.getDstReg());
|
2010-02-09 18:20:11 +01:00
|
|
|
}
|
|
|
|
BlockSeq.clear();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2008-08-30 11:09:33 +02:00
|
|
|
// SrcReg is guarateed to be the register whose live interval that is
|
|
|
|
// being merged.
|
2010-06-24 20:15:01 +02:00
|
|
|
li_->removeInterval(CP.getSrcReg());
|
2008-08-30 11:09:33 +02:00
|
|
|
|
2009-06-18 04:04:01 +02:00
|
|
|
// Update regalloc hint.
|
2010-06-24 20:15:01 +02:00
|
|
|
tri_->UpdateRegAllocHint(CP.getSrcReg(), CP.getDstReg(), *mf_);
|
2008-09-09 23:44:23 +02:00
|
|
|
|
2009-08-22 22:52:46 +02:00
|
|
|
DEBUG({
|
2010-06-24 20:15:01 +02:00
|
|
|
LiveInterval &DstInt = li_->getInterval(CP.getDstReg());
|
|
|
|
dbgs() << "\tJoined. Result = ";
|
|
|
|
DstInt.print(dbgs(), tri_);
|
|
|
|
dbgs() << "\n";
|
|
|
|
});
|
2008-04-03 18:41:54 +02:00
|
|
|
|
2007-06-08 19:18:56 +02:00
|
|
|
++numJoins;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
/// ComputeUltimateVN - Assuming we are going to join two live intervals,
|
|
|
|
/// compute what the resultant value numbers for each value in the input two
|
|
|
|
/// ranges will be. This is complicated by copies between the two which can
|
|
|
|
/// and will commonly cause multiple value numbers to be merged into one.
|
|
|
|
///
|
|
|
|
/// VN is the value number that we're trying to resolve. InstDefiningValue
|
|
|
|
/// keeps track of the new InstDefiningValue assignment for the result
|
|
|
|
/// LiveInterval. ThisFromOther/OtherFromThis are sets that keep track of
|
|
|
|
/// whether a value in this or other is a copy from the opposite set.
|
|
|
|
/// ThisValNoAssignments/OtherValNoAssignments keep track of value #'s that have
|
|
|
|
/// already been assigned.
|
|
|
|
///
|
|
|
|
/// ThisFromOther[x] - If x is defined as a copy from the other interval, this
|
|
|
|
/// contains the value number the copy is from.
|
|
|
|
///
|
2007-08-29 22:45:00 +02:00
|
|
|
static unsigned ComputeUltimateVN(VNInfo *VNI,
|
|
|
|
SmallVector<VNInfo*, 16> &NewVNInfo,
|
2007-08-31 23:23:06 +02:00
|
|
|
DenseMap<VNInfo*, VNInfo*> &ThisFromOther,
|
|
|
|
DenseMap<VNInfo*, VNInfo*> &OtherFromThis,
|
2007-06-08 19:18:56 +02:00
|
|
|
SmallVector<int, 16> &ThisValNoAssignments,
|
2007-08-29 22:45:00 +02:00
|
|
|
SmallVector<int, 16> &OtherValNoAssignments) {
|
|
|
|
unsigned VN = VNI->id;
|
|
|
|
|
2007-06-08 19:18:56 +02:00
|
|
|
// If the VN has already been computed, just return it.
|
|
|
|
if (ThisValNoAssignments[VN] >= 0)
|
|
|
|
return ThisValNoAssignments[VN];
|
2010-02-21 19:51:48 +01:00
|
|
|
assert(ThisValNoAssignments[VN] != -2 && "Cyclic value numbers");
|
2007-08-29 22:45:00 +02:00
|
|
|
|
2007-06-08 19:18:56 +02:00
|
|
|
// If this val is not a copy from the other val, then it must be a new value
|
|
|
|
// number in the destination.
|
2007-08-31 23:23:06 +02:00
|
|
|
DenseMap<VNInfo*, VNInfo*>::iterator I = ThisFromOther.find(VNI);
|
2007-08-31 10:04:17 +02:00
|
|
|
if (I == ThisFromOther.end()) {
|
2007-08-29 22:45:00 +02:00
|
|
|
NewVNInfo.push_back(VNI);
|
|
|
|
return ThisValNoAssignments[VN] = NewVNInfo.size()-1;
|
2007-06-08 19:18:56 +02:00
|
|
|
}
|
2007-08-31 10:04:17 +02:00
|
|
|
VNInfo *OtherValNo = I->second;
|
2007-06-08 19:18:56 +02:00
|
|
|
|
|
|
|
// Otherwise, this *is* a copy from the RHS. If the other side has already
|
|
|
|
// been computed, return it.
|
2007-08-29 22:45:00 +02:00
|
|
|
if (OtherValNoAssignments[OtherValNo->id] >= 0)
|
|
|
|
return ThisValNoAssignments[VN] = OtherValNoAssignments[OtherValNo->id];
|
2009-09-20 04:20:51 +02:00
|
|
|
|
2007-06-08 19:18:56 +02:00
|
|
|
// Mark this value number as currently being computed, then ask what the
|
|
|
|
// ultimate value # of the other value is.
|
|
|
|
ThisValNoAssignments[VN] = -2;
|
|
|
|
unsigned UltimateVN =
|
2007-08-29 22:45:00 +02:00
|
|
|
ComputeUltimateVN(OtherValNo, NewVNInfo, OtherFromThis, ThisFromOther,
|
|
|
|
OtherValNoAssignments, ThisValNoAssignments);
|
2007-06-08 19:18:56 +02:00
|
|
|
return ThisValNoAssignments[VN] = UltimateVN;
|
|
|
|
}
|
|
|
|
|
|
|
|
/// JoinIntervals - Attempt to join these two intervals.  On failure, this
/// returns false.  On success the intervals have been merged into the
/// destination interval (LHS) and the caller is expected to remove the
/// source interval (RHS).
bool SimpleRegisterCoalescing::JoinIntervals(CoalescerPair &CP) {
  LiveInterval &RHS = li_->getInterval(CP.getSrcReg());
  DEBUG({ dbgs() << "\t\tRHS = "; RHS.print(dbgs(), tri_); dbgs() << "\n"; });

  // If a live interval is a physical register, check for interference with any
  // aliases. The interference check implemented here is a bit more conservative
  // than the full interference check below. We allow overlapping live ranges
  // only when one is a copy of the other.
  if (CP.isPhys()) {
    for (const unsigned *AS = tri_->getAliasSet(CP.getDstReg()); *AS; ++AS){
      if (!li_->hasInterval(*AS))
        continue;
      const LiveInterval &LHS = li_->getInterval(*AS);
      LiveInterval::const_iterator LI = LHS.begin();
      for (LiveInterval::const_iterator RI = RHS.begin(), RE = RHS.end();
           RI != RE; ++RI) {
        // Advance LI to the first alias range that could overlap RI.
        LI = std::lower_bound(LI, LHS.end(), RI->start);
        // Does LHS have an overlapping live range starting before RI?
        // That is only tolerable when RI is itself defined by a coalescable
        // copy at its start.
        if ((LI != LHS.begin() && LI[-1].end > RI->start) &&
            (RI->start != RI->valno->def ||
             !CP.isCoalescable(li_->getInstructionFromIndex(RI->start)))) {
          DEBUG({
            dbgs() << "\t\tInterference from alias: ";
            LHS.print(dbgs(), tri_);
            dbgs() << "\n\t\tOverlap at " << RI->start << " and no copy.\n";
          });
          return false;
        }

        // Check that LHS ranges beginning in this range are copies.
        for (; LI != LHS.end() && LI->start < RI->end; ++LI) {
          if (LI->start != LI->valno->def ||
              !CP.isCoalescable(li_->getInstructionFromIndex(LI->start))) {
            DEBUG({
              dbgs() << "\t\tInterference from alias: ";
              LHS.print(dbgs(), tri_);
              dbgs() << "\n\t\tDef at " << LI->start << " is not a copy.\n";
            });
            return false;
          }
        }
      }
    }
  }

  // Compute the final value assignment, assuming that the live ranges can be
  // coalesced.
  SmallVector<int, 16> LHSValNoAssignments;
  SmallVector<int, 16> RHSValNoAssignments;
  DenseMap<VNInfo*, VNInfo*> LHSValsDefinedFromRHS;
  DenseMap<VNInfo*, VNInfo*> RHSValsDefinedFromLHS;
  SmallVector<VNInfo*, 16> NewVNInfo;

  LiveInterval &LHS = li_->getOrCreateInterval(CP.getDstReg());
  DEBUG({ dbgs() << "\t\tLHS = "; LHS.print(dbgs(), tri_); dbgs() << "\n"; });

  // Loop over the value numbers of the LHS, seeing if any are defined from
  // the RHS.
  for (LiveInterval::vni_iterator i = LHS.vni_begin(), e = LHS.vni_end();
       i != e; ++i) {
    VNInfo *VNI = *i;
    if (VNI->isUnused() || !VNI->isDefByCopy()) // Src not defined by a copy?
      continue;

    // Never join with a register that has EarlyClobber redefs.
    if (VNI->hasRedefByEC())
      return false;

    // DstReg is known to be a register in the LHS interval.  If the src is
    // from the RHS interval, we can use its value #.
    if (!CP.isCoalescable(VNI->getCopy()))
      continue;

    // Figure out the value # from the RHS.
    LiveRange *lr = RHS.getLiveRangeContaining(VNI->def.getPrevSlot());
    // The copy could be to an aliased physreg.
    if (!lr) continue;
    LHSValsDefinedFromRHS[VNI] = lr->valno;
  }

  // Loop over the value numbers of the RHS, seeing if any are defined from
  // the LHS.
  for (LiveInterval::vni_iterator i = RHS.vni_begin(), e = RHS.vni_end();
       i != e; ++i) {
    VNInfo *VNI = *i;
    if (VNI->isUnused() || !VNI->isDefByCopy()) // Src not defined by a copy?
      continue;

    // Never join with a register that has EarlyClobber redefs.
    if (VNI->hasRedefByEC())
      return false;

    // DstReg is known to be a register in the RHS interval.  If the src is
    // from the LHS interval, we can use its value #.
    if (!CP.isCoalescable(VNI->getCopy()))
      continue;

    // Figure out the value # from the LHS.
    LiveRange *lr = LHS.getLiveRangeContaining(VNI->def.getPrevSlot());
    // The copy could be to an aliased physreg.
    if (!lr) continue;
    RHSValsDefinedFromLHS[VNI] = lr->valno;
  }

  // -1 marks "not yet assigned" for ComputeUltimateVN; -2 marks in-progress.
  LHSValNoAssignments.resize(LHS.getNumValNums(), -1);
  RHSValNoAssignments.resize(RHS.getNumValNums(), -1);
  NewVNInfo.reserve(LHS.getNumValNums() + RHS.getNumValNums());

  for (LiveInterval::vni_iterator i = LHS.vni_begin(), e = LHS.vni_end();
       i != e; ++i) {
    VNInfo *VNI = *i;
    unsigned VN = VNI->id;
    if (LHSValNoAssignments[VN] >= 0 || VNI->isUnused())
      continue;
    ComputeUltimateVN(VNI, NewVNInfo,
                      LHSValsDefinedFromRHS, RHSValsDefinedFromLHS,
                      LHSValNoAssignments, RHSValNoAssignments);
  }
  for (LiveInterval::vni_iterator i = RHS.vni_begin(), e = RHS.vni_end();
       i != e; ++i) {
    VNInfo *VNI = *i;
    unsigned VN = VNI->id;
    if (RHSValNoAssignments[VN] >= 0 || VNI->isUnused())
      continue;
    // If this value number isn't a copy from the LHS, it's a new number.
    if (RHSValsDefinedFromLHS.find(VNI) == RHSValsDefinedFromLHS.end()) {
      NewVNInfo.push_back(VNI);
      RHSValNoAssignments[VN] = NewVNInfo.size()-1;
      continue;
    }

    ComputeUltimateVN(VNI, NewVNInfo,
                      RHSValsDefinedFromLHS, LHSValsDefinedFromRHS,
                      RHSValNoAssignments, LHSValNoAssignments);
  }

  // Armed with the mappings of LHS/RHS values to ultimate values, walk the
  // interval lists to see if these intervals are coalescable.
  LiveInterval::const_iterator I = LHS.begin();
  LiveInterval::const_iterator IE = LHS.end();
  LiveInterval::const_iterator J = RHS.begin();
  LiveInterval::const_iterator JE = RHS.end();

  // Skip ahead until the first place of potential sharing.
  if (I != IE && J != JE) {
    if (I->start < J->start) {
      I = std::upper_bound(I, IE, J->start);
      if (I != LHS.begin()) --I;
    } else if (J->start < I->start) {
      J = std::upper_bound(J, JE, I->start);
      if (J != RHS.begin()) --J;
    }
  }

  // March the two sorted range lists in lockstep.
  while (I != IE && J != JE) {
    // Determine if these two live ranges overlap.
    bool Overlaps;
    if (I->start < J->start) {
      Overlaps = I->end > J->start;
    } else {
      Overlaps = J->end > I->start;
    }

    // If so, check value # info to determine if they are really different.
    if (Overlaps) {
      // If the live range overlap will map to the same value number in the
      // result liverange, we can still coalesce them.  If not, we can't.
      if (LHSValNoAssignments[I->valno->id] !=
          RHSValNoAssignments[J->valno->id])
        return false;
      // If it's re-defined by an early clobber somewhere in the live range,
      // then conservatively abort coalescing.
      if (NewVNInfo[LHSValNoAssignments[I->valno->id]]->hasRedefByEC())
        return false;
    }

    if (I->end < J->end)
      ++I;
    else
      ++J;
  }

  // Update kill info. Some live ranges are extended due to copy coalescing.
  for (DenseMap<VNInfo*, VNInfo*>::iterator I = LHSValsDefinedFromRHS.begin(),
         E = LHSValsDefinedFromRHS.end(); I != E; ++I) {
    VNInfo *VNI = I->first;
    unsigned LHSValID = LHSValNoAssignments[VNI->id];
    if (VNI->hasPHIKill())
      NewVNInfo[LHSValID]->setHasPHIKill(true);
  }

  // Likewise propagate PHI-kill flags for RHS values defined from the LHS.
  for (DenseMap<VNInfo*, VNInfo*>::iterator I = RHSValsDefinedFromLHS.begin(),
         E = RHSValsDefinedFromLHS.end(); I != E; ++I) {
    VNInfo *VNI = I->first;
    unsigned RHSValID = RHSValNoAssignments[VNI->id];
    if (VNI->hasPHIKill())
      NewVNInfo[RHSValID]->setHasPHIKill(true);
  }

  // join() below dereferences the assignment arrays; make sure they are
  // non-empty even for empty intervals.
  if (LHSValNoAssignments.empty())
    LHSValNoAssignments.push_back(-1);
  if (RHSValNoAssignments.empty())
    RHSValNoAssignments.push_back(-1);

  // If we get here, we know that we can coalesce the live ranges.  Ask the
  // intervals to coalesce themselves now.
  LHS.join(RHS, &LHSValNoAssignments[0], &RHSValNoAssignments[0], NewVNInfo,
           mri_);
  return true;
}
|
|
|
|
|
|
|
|
namespace {
|
|
|
|
// DepthMBBCompare - Comparison predicate that sort first based on the loop
|
|
|
|
// depth of the basic block (the unsigned), and then on the MBB number.
|
|
|
|
struct DepthMBBCompare {
|
|
|
|
typedef std::pair<unsigned, MachineBasicBlock*> DepthMBBPair;
|
|
|
|
bool operator()(const DepthMBBPair &LHS, const DepthMBBPair &RHS) const {
|
2009-12-01 04:03:00 +01:00
|
|
|
// Deeper loops first
|
|
|
|
if (LHS.first != RHS.first)
|
|
|
|
return LHS.first > RHS.first;
|
|
|
|
|
|
|
|
// Prefer blocks that are more connected in the CFG. This takes care of
|
|
|
|
// the most difficult copies first while intervals are short.
|
|
|
|
unsigned cl = LHS.second->pred_size() + LHS.second->succ_size();
|
|
|
|
unsigned cr = RHS.second->pred_size() + RHS.second->succ_size();
|
|
|
|
if (cl != cr)
|
|
|
|
return cl > cr;
|
|
|
|
|
|
|
|
// As a last resort, sort by block number.
|
|
|
|
return LHS.second->getNumber() < RHS.second->getNumber();
|
2007-06-08 19:18:56 +02:00
|
|
|
}
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
2007-07-09 14:00:59 +02:00
|
|
|
void SimpleRegisterCoalescing::CopyCoalesceInMBB(MachineBasicBlock *MBB,
|
2007-10-16 10:04:24 +02:00
|
|
|
std::vector<CopyRec> &TryAgain) {
|
2010-01-05 02:25:58 +01:00
|
|
|
DEBUG(dbgs() << MBB->getName() << ":\n");
|
2007-11-06 09:52:21 +01:00
|
|
|
|
2007-10-16 10:04:24 +02:00
|
|
|
std::vector<CopyRec> VirtCopies;
|
|
|
|
std::vector<CopyRec> PhysCopies;
|
2008-04-09 22:57:25 +02:00
|
|
|
std::vector<CopyRec> ImpDefCopies;
|
2007-06-08 19:18:56 +02:00
|
|
|
for (MachineBasicBlock::iterator MII = MBB->begin(), E = MBB->end();
|
|
|
|
MII != E;) {
|
|
|
|
MachineInstr *Inst = MII++;
|
2009-09-20 04:20:51 +02:00
|
|
|
|
2007-10-12 10:50:34 +02:00
|
|
|
// If this isn't a copy nor a extract_subreg, we can't join intervals.
|
2010-07-16 06:45:42 +02:00
|
|
|
unsigned SrcReg, DstReg;
|
2010-07-08 18:40:22 +02:00
|
|
|
if (Inst->isCopy()) {
|
2007-10-12 10:50:34 +02:00
|
|
|
DstReg = Inst->getOperand(0).getReg();
|
|
|
|
SrcReg = Inst->getOperand(1).getReg();
|
2010-07-08 18:40:15 +02:00
|
|
|
} else if (Inst->isSubregToReg()) {
|
2008-04-09 22:57:25 +02:00
|
|
|
DstReg = Inst->getOperand(0).getReg();
|
|
|
|
SrcReg = Inst->getOperand(2).getReg();
|
2010-07-16 06:45:42 +02:00
|
|
|
} else
|
2007-10-12 10:50:34 +02:00
|
|
|
continue;
|
2007-10-16 10:04:24 +02:00
|
|
|
|
2008-02-15 19:24:29 +01:00
|
|
|
bool SrcIsPhys = TargetRegisterInfo::isPhysicalRegister(SrcReg);
|
|
|
|
bool DstIsPhys = TargetRegisterInfo::isPhysicalRegister(DstReg);
|
2010-07-16 06:45:42 +02:00
|
|
|
if (li_->hasInterval(SrcReg) && li_->getInterval(SrcReg).empty())
|
2009-09-12 04:14:41 +02:00
|
|
|
ImpDefCopies.push_back(CopyRec(Inst, 0));
|
|
|
|
else if (SrcIsPhys || DstIsPhys)
|
|
|
|
PhysCopies.push_back(CopyRec(Inst, 0));
|
|
|
|
else
|
|
|
|
VirtCopies.push_back(CopyRec(Inst, 0));
|
2007-10-16 10:04:24 +02:00
|
|
|
}
|
|
|
|
|
2009-12-11 07:01:00 +01:00
|
|
|
// Try coalescing implicit copies and insert_subreg <undef> first,
|
|
|
|
// followed by copies to / from physical registers, then finally copies
|
|
|
|
// from virtual registers to virtual registers.
|
2008-04-09 22:57:25 +02:00
|
|
|
for (unsigned i = 0, e = ImpDefCopies.size(); i != e; ++i) {
|
|
|
|
CopyRec &TheCopy = ImpDefCopies[i];
|
|
|
|
bool Again = false;
|
|
|
|
if (!JoinCopy(TheCopy, Again))
|
|
|
|
if (Again)
|
|
|
|
TryAgain.push_back(TheCopy);
|
|
|
|
}
|
2007-10-16 10:04:24 +02:00
|
|
|
for (unsigned i = 0, e = PhysCopies.size(); i != e; ++i) {
|
|
|
|
CopyRec &TheCopy = PhysCopies[i];
|
2007-11-01 07:22:48 +01:00
|
|
|
bool Again = false;
|
2007-11-06 09:52:21 +01:00
|
|
|
if (!JoinCopy(TheCopy, Again))
|
2007-11-01 07:22:48 +01:00
|
|
|
if (Again)
|
|
|
|
TryAgain.push_back(TheCopy);
|
2007-10-16 10:04:24 +02:00
|
|
|
}
|
|
|
|
for (unsigned i = 0, e = VirtCopies.size(); i != e; ++i) {
|
|
|
|
CopyRec &TheCopy = VirtCopies[i];
|
2007-11-01 07:22:48 +01:00
|
|
|
bool Again = false;
|
2007-11-06 09:52:21 +01:00
|
|
|
if (!JoinCopy(TheCopy, Again))
|
2007-11-01 07:22:48 +01:00
|
|
|
if (Again)
|
|
|
|
TryAgain.push_back(TheCopy);
|
2007-06-08 19:18:56 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void SimpleRegisterCoalescing::joinIntervals() {
|
2010-01-05 02:25:58 +01:00
|
|
|
DEBUG(dbgs() << "********** JOINING INTERVALS ***********\n");
|
2007-06-08 19:18:56 +02:00
|
|
|
|
|
|
|
std::vector<CopyRec> TryAgainList;
|
2008-08-14 20:13:49 +02:00
|
|
|
if (loopInfo->empty()) {
|
2007-06-08 19:18:56 +02:00
|
|
|
// If there are no loops in the function, join intervals in function order.
|
|
|
|
for (MachineFunction::iterator I = mf_->begin(), E = mf_->end();
|
|
|
|
I != E; ++I)
|
2007-10-16 10:04:24 +02:00
|
|
|
CopyCoalesceInMBB(I, TryAgainList);
|
2007-06-08 19:18:56 +02:00
|
|
|
} else {
|
|
|
|
// Otherwise, join intervals in inner loops before other intervals.
|
|
|
|
// Unfortunately we can't just iterate over loop hierarchy here because
|
|
|
|
// there may be more MBB's than BB's. Collect MBB's for sorting.
|
|
|
|
|
|
|
|
// Join intervals in the function prolog first. We want to join physical
|
|
|
|
// registers with virtual registers before the intervals got too long.
|
|
|
|
std::vector<std::pair<unsigned, MachineBasicBlock*> > MBBs;
|
2007-12-11 03:09:15 +01:00
|
|
|
for (MachineFunction::iterator I = mf_->begin(), E = mf_->end();I != E;++I){
|
|
|
|
MachineBasicBlock *MBB = I;
|
|
|
|
MBBs.push_back(std::make_pair(loopInfo->getLoopDepth(MBB), I));
|
|
|
|
}
|
2007-06-08 19:18:56 +02:00
|
|
|
|
|
|
|
// Sort by loop depth.
|
|
|
|
std::sort(MBBs.begin(), MBBs.end(), DepthMBBCompare());
|
|
|
|
|
|
|
|
// Finally, join intervals in loop nest order.
|
|
|
|
for (unsigned i = 0, e = MBBs.size(); i != e; ++i)
|
2007-10-16 10:04:24 +02:00
|
|
|
CopyCoalesceInMBB(MBBs[i].second, TryAgainList);
|
2007-06-08 19:18:56 +02:00
|
|
|
}
|
2009-09-20 04:20:51 +02:00
|
|
|
|
2007-06-08 19:18:56 +02:00
|
|
|
// Joining intervals can allow other intervals to be joined. Iteratively join
|
|
|
|
// until we make no progress.
|
2009-09-12 04:14:41 +02:00
|
|
|
bool ProgressMade = true;
|
|
|
|
while (ProgressMade) {
|
|
|
|
ProgressMade = false;
|
2007-11-06 09:52:21 +01:00
|
|
|
|
2009-09-12 04:14:41 +02:00
|
|
|
for (unsigned i = 0, e = TryAgainList.size(); i != e; ++i) {
|
|
|
|
CopyRec &TheCopy = TryAgainList[i];
|
|
|
|
if (!TheCopy.MI)
|
|
|
|
continue;
|
|
|
|
|
|
|
|
bool Again = false;
|
|
|
|
bool Success = JoinCopy(TheCopy, Again);
|
|
|
|
if (Success || !Again) {
|
|
|
|
TheCopy.MI = 0; // Mark this one as done.
|
|
|
|
ProgressMade = true;
|
2007-06-08 19:18:56 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Return true if the two specified registers belong to different register
|
2009-01-23 03:15:19 +01:00
|
|
|
/// classes. The registers may be either phys or virt regs.
|
2008-06-19 03:39:21 +02:00
|
|
|
bool
|
2009-01-23 03:15:19 +01:00
|
|
|
SimpleRegisterCoalescing::differingRegisterClasses(unsigned RegA,
|
|
|
|
unsigned RegB) const {
|
2007-06-08 19:18:56 +02:00
|
|
|
// Get the register classes for the first reg.
|
2008-02-10 19:45:23 +01:00
|
|
|
if (TargetRegisterInfo::isPhysicalRegister(RegA)) {
|
|
|
|
assert(TargetRegisterInfo::isVirtualRegister(RegB) &&
|
2007-06-08 19:18:56 +02:00
|
|
|
"Shouldn't consider two physregs!");
|
2008-02-15 19:24:29 +01:00
|
|
|
return !mri_->getRegClass(RegB)->contains(RegA);
|
2007-06-08 19:18:56 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
// Compare against the regclass for the second reg.
|
2008-06-19 03:39:21 +02:00
|
|
|
const TargetRegisterClass *RegClassA = mri_->getRegClass(RegA);
|
|
|
|
if (TargetRegisterInfo::isVirtualRegister(RegB)) {
|
|
|
|
const TargetRegisterClass *RegClassB = mri_->getRegClass(RegB);
|
2009-01-23 03:15:19 +01:00
|
|
|
return RegClassA != RegClassB;
|
2008-06-19 03:39:21 +02:00
|
|
|
}
|
|
|
|
return !RegClassA->contains(RegB);
|
2007-06-08 19:18:56 +02:00
|
|
|
}
|
|
|
|
|
2010-02-10 01:55:42 +01:00
|
|
|
/// lastRegisterUse - Returns the last (non-debug) use of the specific register
/// between cycles Start and End or NULL if there are no uses.  On success
/// UseIdx is set to the use slot of the returned operand; otherwise it is
/// left as the default-constructed SlotIndex.
MachineOperand *
SimpleRegisterCoalescing::lastRegisterUse(SlotIndex Start,
                                          SlotIndex End,
                                          unsigned Reg,
                                          SlotIndex &UseIdx) const{
  UseIdx = SlotIndex();
  if (TargetRegisterInfo::isVirtualRegister(Reg)) {
    // Virtual register: scan its (non-debug) use list and keep the use with
    // the highest index inside [Start, End).
    MachineOperand *LastUse = NULL;
    for (MachineRegisterInfo::use_nodbg_iterator I = mri_->use_nodbg_begin(Reg),
           E = mri_->use_nodbg_end(); I != E; ++I) {
      MachineOperand &Use = I.getOperand();
      MachineInstr *UseMI = Use.getParent();
      // Identity copies do not count as real uses.
      if (UseMI->isIdentityCopy())
        continue;
      SlotIndex Idx = li_->getInstructionIndex(UseMI);
      // FIXME: Should this be Idx != UseIdx? SlotIndex() will return something
      // that compares higher than any other interval.
      if (Idx >= Start && Idx < End && Idx >= UseIdx) {
        LastUse = &Use;
        UseIdx = Idx.getUseIndex();
      }
    }
    return LastUse;
  }

  // Physical register: walk the slot indexes backwards from just before End
  // and inspect each instruction's operands directly.
  SlotIndex s = Start;
  SlotIndex e = End.getPrevSlot().getBaseIndex();
  while (e >= s) {
    // Skip deleted instructions
    MachineInstr *MI = li_->getInstructionFromIndex(e);
    while (e != SlotIndex() && e.getPrevIndex() >= s && !MI) {
      e = e.getPrevIndex();
      MI = li_->getInstructionFromIndex(e);
    }
    if (e < s || MI == NULL)
      return NULL;

    // Ignore identity copies.
    if (!MI->isIdentityCopy())
      for (unsigned i = 0, NumOps = MI->getNumOperands(); i != NumOps; ++i) {
        MachineOperand &Use = MI->getOperand(i);
        // Any use operand overlapping Reg (including sub/super registers)
        // qualifies, since this is a physical register query.
        if (Use.isReg() && Use.isUse() && Use.getReg() &&
            tri_->regsOverlap(Use.getReg(), Reg)) {
          UseIdx = e.getUseIndex();
          return &Use;
        }
      }

    e = e.getPrevIndex();
  }

  return NULL;
}
|
|
|
|
|
|
|
|
/// releaseMemory - Drop all per-function state collected during the last run
/// so the pass object can be reused on another function.
void SimpleRegisterCoalescing::releaseMemory() {
  JoinedCopies.clear();
  ReMatCopies.clear();
  ReMatDefs.clear();
}
|
|
|
|
|
|
|
|
bool SimpleRegisterCoalescing::runOnMachineFunction(MachineFunction &fn) {
|
|
|
|
mf_ = &fn;
|
2008-02-13 04:01:43 +01:00
|
|
|
mri_ = &fn.getRegInfo();
|
2007-06-08 19:18:56 +02:00
|
|
|
tm_ = &fn.getTarget();
|
2008-02-10 19:45:23 +01:00
|
|
|
tri_ = tm_->getRegisterInfo();
|
2007-06-08 19:18:56 +02:00
|
|
|
tii_ = tm_->getInstrInfo();
|
|
|
|
li_ = &getAnalysis<LiveIntervals>();
|
2009-10-10 01:27:56 +02:00
|
|
|
AA = &getAnalysis<AliasAnalysis>();
|
2007-12-11 03:09:15 +01:00
|
|
|
loopInfo = &getAnalysis<MachineLoopInfo>();
|
2007-06-08 19:18:56 +02:00
|
|
|
|
2010-01-05 02:25:58 +01:00
|
|
|
DEBUG(dbgs() << "********** SIMPLE REGISTER COALESCING **********\n"
|
2009-08-22 22:52:46 +02:00
|
|
|
<< "********** Function: "
|
|
|
|
<< ((Value*)mf_->getFunction())->getName() << '\n');
|
2007-06-08 19:18:56 +02:00
|
|
|
|
2008-02-10 19:45:23 +01:00
|
|
|
for (TargetRegisterInfo::regclass_iterator I = tri_->regclass_begin(),
|
|
|
|
E = tri_->regclass_end(); I != E; ++I)
|
2008-01-04 09:59:18 +01:00
|
|
|
allocatableRCRegs_.insert(std::make_pair(*I,
|
2008-02-10 19:45:23 +01:00
|
|
|
tri_->getAllocatableSet(fn, *I)));
|
2007-06-08 19:18:56 +02:00
|
|
|
|
2007-07-09 14:00:59 +02:00
|
|
|
// Join (coalesce) intervals if requested.
|
2007-06-08 19:18:56 +02:00
|
|
|
if (EnableJoining) {
|
|
|
|
joinIntervals();
|
2008-12-19 03:09:57 +01:00
|
|
|
DEBUG({
|
2010-01-05 02:25:58 +01:00
|
|
|
dbgs() << "********** INTERVALS POST JOINING **********\n";
|
2009-12-03 01:50:42 +01:00
|
|
|
for (LiveIntervals::iterator I = li_->begin(), E = li_->end();
|
|
|
|
I != E; ++I){
|
2010-01-05 02:25:58 +01:00
|
|
|
I->second->print(dbgs(), tri_);
|
|
|
|
dbgs() << "\n";
|
2008-12-19 03:09:57 +01:00
|
|
|
}
|
|
|
|
});
|
2007-06-08 19:18:56 +02:00
|
|
|
}
|
|
|
|
|
2008-02-15 19:24:29 +01:00
|
|
|
// Perform a final pass over the instructions and compute spill weights
|
|
|
|
// and remove identity moves.
|
2008-10-28 00:21:01 +01:00
|
|
|
SmallVector<unsigned, 4> DeadDefs;
|
2007-06-08 19:18:56 +02:00
|
|
|
for (MachineFunction::iterator mbbi = mf_->begin(), mbbe = mf_->end();
|
|
|
|
mbbi != mbbe; ++mbbi) {
|
|
|
|
MachineBasicBlock* mbb = mbbi;
|
|
|
|
for (MachineBasicBlock::iterator mii = mbb->begin(), mie = mbb->end();
|
|
|
|
mii != mie; ) {
|
2008-04-24 11:06:33 +02:00
|
|
|
MachineInstr *MI = mii;
|
|
|
|
if (JoinedCopies.count(MI)) {
|
|
|
|
// Delete all coalesced copies.
|
2009-09-28 07:28:43 +02:00
|
|
|
bool DoDelete = true;
|
2010-07-16 06:45:42 +02:00
|
|
|
assert(MI->isCopyLike() && "Unrecognized copy instruction");
|
|
|
|
unsigned SrcReg = MI->getOperand(MI->isSubregToReg() ? 2 : 1).getReg();
|
2010-08-06 01:51:28 +02:00
|
|
|
if (TargetRegisterInfo::isPhysicalRegister(SrcReg) &&
|
|
|
|
MI->getNumOperands() > 2)
|
2010-07-16 06:45:42 +02:00
|
|
|
// Do not delete extract_subreg, insert_subreg of physical
|
|
|
|
// registers unless the definition is dead. e.g.
|
|
|
|
// %DO<def> = INSERT_SUBREG %D0<undef>, %S0<kill>, 1
|
|
|
|
// or else the scavenger may complain. LowerSubregs will
|
|
|
|
// delete them later.
|
|
|
|
DoDelete = false;
|
|
|
|
|
2010-04-08 22:02:37 +02:00
|
|
|
if (MI->allDefsAreDead()) {
|
2010-06-24 20:15:01 +02:00
|
|
|
LiveInterval &li = li_->getInterval(SrcReg);
|
2008-04-24 11:06:33 +02:00
|
|
|
if (!ShortenDeadCopySrcLiveRange(li, MI))
|
|
|
|
ShortenDeadCopyLiveRange(li, MI);
|
2009-09-28 07:28:43 +02:00
|
|
|
DoDelete = true;
|
|
|
|
}
|
2010-08-06 01:51:28 +02:00
|
|
|
if (!DoDelete) {
|
|
|
|
// We need the instruction to adjust liveness, so make it a KILL.
|
|
|
|
if (MI->isSubregToReg()) {
|
|
|
|
MI->RemoveOperand(3);
|
|
|
|
MI->RemoveOperand(1);
|
|
|
|
}
|
|
|
|
MI->setDesc(tii_->get(TargetOpcode::KILL));
|
2009-12-03 01:50:42 +01:00
|
|
|
mii = llvm::next(mii);
|
2010-08-06 01:51:28 +02:00
|
|
|
} else {
|
2009-09-28 07:28:43 +02:00
|
|
|
li_->RemoveMachineInstrFromMaps(MI);
|
|
|
|
mii = mbbi->erase(mii);
|
2009-10-26 05:56:07 +01:00
|
|
|
++numPeep;
|
2008-04-24 11:06:33 +02:00
|
|
|
}
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
2008-09-19 19:38:47 +02:00
|
|
|
// Now check if this is a remat'ed def instruction which is now dead.
|
|
|
|
if (ReMatDefs.count(MI)) {
|
|
|
|
bool isDead = true;
|
|
|
|
for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
|
|
|
|
const MachineOperand &MO = MI->getOperand(i);
|
2008-10-28 00:21:01 +01:00
|
|
|
if (!MO.isReg())
|
2008-09-19 19:38:47 +02:00
|
|
|
continue;
|
|
|
|
unsigned Reg = MO.getReg();
|
2009-02-04 19:18:58 +01:00
|
|
|
if (!Reg)
|
|
|
|
continue;
|
2008-10-28 00:21:01 +01:00
|
|
|
if (TargetRegisterInfo::isVirtualRegister(Reg))
|
|
|
|
DeadDefs.push_back(Reg);
|
|
|
|
if (MO.isDead())
|
|
|
|
continue;
|
2008-09-19 19:38:47 +02:00
|
|
|
if (TargetRegisterInfo::isPhysicalRegister(Reg) ||
|
2010-03-25 02:01:37 +01:00
|
|
|
!mri_->use_nodbg_empty(Reg)) {
|
2008-09-19 19:38:47 +02:00
|
|
|
isDead = false;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if (isDead) {
|
2008-10-28 00:21:01 +01:00
|
|
|
while (!DeadDefs.empty()) {
|
|
|
|
unsigned DeadDef = DeadDefs.back();
|
|
|
|
DeadDefs.pop_back();
|
|
|
|
RemoveDeadDef(li_->getInterval(DeadDef), MI);
|
|
|
|
}
|
2008-09-19 19:38:47 +02:00
|
|
|
li_->RemoveMachineInstrFromMaps(mii);
|
|
|
|
mii = mbbi->erase(mii);
|
2008-09-20 00:49:39 +02:00
|
|
|
continue;
|
2008-10-28 00:21:01 +01:00
|
|
|
} else
|
|
|
|
DeadDefs.clear();
|
2008-09-19 19:38:47 +02:00
|
|
|
}
|
|
|
|
|
2008-04-24 11:06:33 +02:00
|
|
|
// If the move will be an identity move delete it
|
2010-07-16 06:45:42 +02:00
|
|
|
if (MI->isIdentityCopy()) {
|
|
|
|
unsigned SrcReg = MI->getOperand(1).getReg();
|
2008-04-24 11:06:33 +02:00
|
|
|
if (li_->hasInterval(SrcReg)) {
|
|
|
|
LiveInterval &RegInt = li_->getInterval(SrcReg);
|
2008-03-18 09:26:47 +01:00
|
|
|
// If def of this move instruction is dead, remove its live range
|
2010-06-25 22:42:55 +02:00
|
|
|
// from the destination register's live interval.
|
|
|
|
if (MI->allDefsAreDead()) {
|
2008-09-19 19:38:47 +02:00
|
|
|
if (!ShortenDeadCopySrcLiveRange(RegInt, MI))
|
|
|
|
ShortenDeadCopyLiveRange(RegInt, MI);
|
2008-03-18 09:26:47 +01:00
|
|
|
}
|
|
|
|
}
|
2008-09-19 19:38:47 +02:00
|
|
|
li_->RemoveMachineInstrFromMaps(MI);
|
2007-06-08 19:18:56 +02:00
|
|
|
mii = mbbi->erase(mii);
|
|
|
|
++numPeep;
|
2010-04-28 20:28:39 +02:00
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
|
|
|
++mii;
|
|
|
|
|
|
|
|
// Check for now unnecessary kill flags.
|
|
|
|
if (li_->isNotInMIMap(MI)) continue;
|
2010-06-26 00:53:05 +02:00
|
|
|
SlotIndex DefIdx = li_->getInstructionIndex(MI).getDefIndex();
|
2010-04-28 20:28:39 +02:00
|
|
|
for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
|
|
|
|
MachineOperand &MO = MI->getOperand(i);
|
|
|
|
if (!MO.isReg() || !MO.isKill()) continue;
|
|
|
|
unsigned reg = MO.getReg();
|
|
|
|
if (!reg || !li_->hasInterval(reg)) continue;
|
2010-10-20 20:45:55 +02:00
|
|
|
if (!li_->getInterval(reg).killedAt(DefIdx)) {
|
2010-04-28 20:28:39 +02:00
|
|
|
MO.setIsKill(false);
|
2010-10-20 20:45:55 +02:00
|
|
|
continue;
|
|
|
|
}
|
|
|
|
// When leaving a kill flag on a physreg, check if any subregs should
|
|
|
|
// remain alive.
|
|
|
|
if (!TargetRegisterInfo::isPhysicalRegister(reg))
|
|
|
|
continue;
|
|
|
|
for (const unsigned *SR = tri_->getSubRegisters(reg);
|
|
|
|
unsigned S = *SR; ++SR)
|
|
|
|
if (li_->hasInterval(S) && li_->getInterval(S).liveAt(DefIdx))
|
|
|
|
MI->addRegisterDefined(S, tri_);
|
2007-06-08 19:18:56 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
DEBUG(dump());
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
/// print - Implement the dump method.
|
2009-08-23 08:03:38 +02:00
|
|
|
void SimpleRegisterCoalescing::print(raw_ostream &O, const Module* m) const {
|
2007-06-08 19:18:56 +02:00
|
|
|
li_->print(O, m);
|
|
|
|
}
|
2007-09-06 18:18:45 +02:00
|
|
|
|
|
|
|
/// createSimpleRegisterCoalescer - Factory entry point used by the pass
/// machinery to instantiate the default (simple) register coalescer.
/// Ownership of the returned object passes to the caller.
RegisterCoalescer* llvm::createSimpleRegisterCoalescer() {
  RegisterCoalescer *Coalescer = new SimpleRegisterCoalescing();
  return Coalescer;
}
|
|
|
|
|
|
|
|
// Make sure that anything that uses RegisterCoalescer pulls in this file...
|
|
|
|
DEFINING_FILE_FOR(SimpleRegisterCoalescing)
|