mirror of
https://github.com/RPCS3/llvm-mirror.git
synced 2024-11-24 19:52:54 +01:00
Teach the local spiller to turn stack slot loads into register-register copies
when possible, avoiding the load (and avoiding the copy if the value is already in the right register). This patch came about when I noticed code like the following being generated: store R17 -> [SS1] ...blah... R4 = load [SS1] This was causing an LSU reject on the G5. This problem was due to the register allocator folding spill code into a reg-reg copy (producing the load), which prevented the spiller from being able to rewrite the load into a copy, despite the fact that the value was already available in a register. In the case above, we now rip out the R4 load and replace it with a R4 = R17 copy. This speeds up several programs on X86 (which spills a lot :) ), e.g. smg2k from 22.39->20.60s, povray from 12.93->12.66s, 168.wupwise from 68.54->53.83s (!), 197.parser from 7.33->6.62s (!), etc. This may have a larger impact in some cases on the G5 (by avoiding LSU rejects), though it probably won't trigger as often (less spilling in general). Targets that implement folding of loads/stores into copies should implement the isLoadFromStackSlot hook to get this. llvm-svn: 23388
This commit is contained in:
parent
4a8f6d97ff
commit
59dd979162
@@ -466,36 +466,61 @@ void LocalSpiller::RewriteMBB(MachineBasicBlock &MBB, const VirtRegMap &VRM) {
|
|||||||
<< I->second.second);
|
<< I->second.second);
|
||||||
unsigned VirtReg = I->second.first;
|
unsigned VirtReg = I->second.first;
|
||||||
VirtRegMap::ModRef MR = I->second.second;
|
VirtRegMap::ModRef MR = I->second.second;
|
||||||
if (VRM.hasStackSlot(VirtReg)) {
|
if (!VRM.hasStackSlot(VirtReg)) {
|
||||||
int SS = VRM.getStackSlot(VirtReg);
|
|
||||||
DEBUG(std::cerr << " - StackSlot: " << SS << "\n");
|
|
||||||
|
|
||||||
// If this reference is not a use, any previous store is now dead.
|
|
||||||
// Otherwise, the store to this stack slot is not dead anymore.
|
|
||||||
std::map<int, MachineInstr*>::iterator MDSI = MaybeDeadStores.find(SS);
|
|
||||||
if (MDSI != MaybeDeadStores.end()) {
|
|
||||||
if (MR & VirtRegMap::isRef) // Previous store is not dead.
|
|
||||||
MaybeDeadStores.erase(MDSI);
|
|
||||||
else {
|
|
||||||
// If we get here, the store is dead, nuke it now.
|
|
||||||
assert(MR == VirtRegMap::isMod && "Can't be modref!");
|
|
||||||
MBB.erase(MDSI->second);
|
|
||||||
MaybeDeadStores.erase(MDSI);
|
|
||||||
++NumDSE;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// If the spill slot value is available, and this is a new definition of
|
|
||||||
// the value, the value is not available anymore.
|
|
||||||
if (MR & VirtRegMap::isMod) {
|
|
||||||
std::map<int, unsigned>::iterator It = SpillSlotsAvailable.find(SS);
|
|
||||||
if (It != SpillSlotsAvailable.end()) {
|
|
||||||
PhysRegsAvailable.erase(It->second);
|
|
||||||
SpillSlotsAvailable.erase(It);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
DEBUG(std::cerr << ": No stack slot!\n");
|
DEBUG(std::cerr << ": No stack slot!\n");
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
int SS = VRM.getStackSlot(VirtReg);
|
||||||
|
DEBUG(std::cerr << " - StackSlot: " << SS << "\n");
|
||||||
|
|
||||||
|
// If this folded instruction is just a use, check to see if it's a
|
||||||
|
// straight load from the virt reg slot.
|
||||||
|
if ((MR & VirtRegMap::isRef) && !(MR & VirtRegMap::isMod)) {
|
||||||
|
int FrameIdx;
|
||||||
|
if (unsigned DestReg = MRI->isLoadFromStackSlot(&MI, FrameIdx)) {
|
||||||
|
// If this spill slot is available, insert a copy for it!
|
||||||
|
std::map<int, unsigned>::iterator It = SpillSlotsAvailable.find(SS);
|
||||||
|
if (FrameIdx == SS && It != SpillSlotsAvailable.end()) {
|
||||||
|
DEBUG(std::cerr << "Promoted Load To Copy: " << MI);
|
||||||
|
MachineFunction &MF = *MBB.getParent();
|
||||||
|
if (DestReg != It->second) {
|
||||||
|
MRI->copyRegToReg(MBB, &MI, DestReg, It->second,
|
||||||
|
MF.getSSARegMap()->getRegClass(VirtReg));
|
||||||
|
// Revisit the copy if the destination is a vreg.
|
||||||
|
if (MRegisterInfo::isVirtualRegister(DestReg)) {
|
||||||
|
NextMII = &MI;
|
||||||
|
--NextMII; // backtrack to the copy.
|
||||||
|
}
|
||||||
|
}
|
||||||
|
MBB.erase(&MI);
|
||||||
|
goto ProcessNextInst;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If this reference is not a use, any previous store is now dead.
|
||||||
|
// Otherwise, the store to this stack slot is not dead anymore.
|
||||||
|
std::map<int, MachineInstr*>::iterator MDSI = MaybeDeadStores.find(SS);
|
||||||
|
if (MDSI != MaybeDeadStores.end()) {
|
||||||
|
if (MR & VirtRegMap::isRef) // Previous store is not dead.
|
||||||
|
MaybeDeadStores.erase(MDSI);
|
||||||
|
else {
|
||||||
|
// If we get here, the store is dead, nuke it now.
|
||||||
|
assert(MR == VirtRegMap::isMod && "Can't be modref!");
|
||||||
|
MBB.erase(MDSI->second);
|
||||||
|
MaybeDeadStores.erase(MDSI);
|
||||||
|
++NumDSE;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If the spill slot value is available, and this is a new definition of
|
||||||
|
// the value, the value is not available anymore.
|
||||||
|
if (MR & VirtRegMap::isMod) {
|
||||||
|
std::map<int, unsigned>::iterator It = SpillSlotsAvailable.find(SS);
|
||||||
|
if (It != SpillSlotsAvailable.end()) {
|
||||||
|
PhysRegsAvailable.erase(It->second);
|
||||||
|
SpillSlotsAvailable.erase(It);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -575,6 +600,7 @@ void LocalSpiller::RewriteMBB(MachineBasicBlock &MBB, const VirtRegMap &VRM) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
ProcessNextInst:
|
||||||
MII = NextMII;
|
MII = NextMII;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Loading…
Reference in New Issue
Block a user