From cece848032e7d8ebe94129a4af76769a81b7075f Mon Sep 17 00:00:00 2001
From: Matt Arsenault
Date: Mon, 27 May 2019 20:37:31 +0000
Subject: [PATCH] RegAllocFast: Set MayLiveAcrossBlocks when allocating uses

Setting mayLiveOut based only on use instructions after allocating the
def block did not work if the use block was allocated before the def
block, since the virtual register uses were already removed.

Fixes bug 41973.

llvm-svn: 361781
---
 lib/CodeGen/RegAllocFast.cpp                      | 28 +++++++-
 .../X86/regalloc-fast-missing-live-out-spill.mir  | 66 +++++++++++++++++++
 2 files changed, 93 insertions(+), 1 deletion(-)
 create mode 100644 test/CodeGen/X86/regalloc-fast-missing-live-out-spill.mir

diff --git a/lib/CodeGen/RegAllocFast.cpp b/lib/CodeGen/RegAllocFast.cpp
index 4da0912508d..2ffa5e389f8 100644
--- a/lib/CodeGen/RegAllocFast.cpp
+++ b/lib/CodeGen/RegAllocFast.cpp
@@ -226,6 +226,7 @@ namespace {
                 MCPhysReg PhysReg);
 
     bool mayLiveOut(unsigned VirtReg);
+    bool mayLiveIn(unsigned VirtReg);
 
     void dumpState();
   };
@@ -270,8 +271,10 @@ bool RegAllocFast::mayLiveOut(unsigned VirtReg) {
 
   // If this block loops back to itself, it would be necessary to check whether
   // the use comes after the def.
-  if (MBB->isSuccessor(MBB))
+  if (MBB->isSuccessor(MBB)) {
+    MayLiveAcrossBlocks.set(TargetRegisterInfo::virtReg2Index(VirtReg));
     return true;
+  }
 
   // See if the first \p Limit uses of the register are all in the current
   // block.
@@ -288,6 +291,24 @@
   return false;
 }
 
+/// Returns false if \p VirtReg is known to not be live into the current block.
+bool RegAllocFast::mayLiveIn(unsigned VirtReg) {
+  if (MayLiveAcrossBlocks.test(TargetRegisterInfo::virtReg2Index(VirtReg)))
+    return !MBB->pred_empty();
+
+  // See if the first \p Limit defs of the register are all in the current block.
+  static const unsigned Limit = 8;
+  unsigned C = 0;
+  for (const MachineInstr &DefInst : MRI->def_instructions(VirtReg)) {
+    if (DefInst.getParent() != MBB || ++C >= Limit) {
+      MayLiveAcrossBlocks.set(TargetRegisterInfo::virtReg2Index(VirtReg));
+      return !MBB->pred_empty();
+    }
+  }
+
+  return false;
+}
+
 /// Insert spill instruction for \p AssignedReg before \p Before. Update
 /// DBG_VALUEs with \p VirtReg operands with the stack slot.
 void RegAllocFast::spill(MachineBasicBlock::iterator Before, unsigned VirtReg,
@@ -1083,6 +1104,11 @@ void RegAllocFast::allocateInstruction(MachineInstr &MI) {
         // There is no need to allocate a register for an undef use.
         continue;
       }
+
+      // Populate MayLiveAcrossBlocks in case the use block is allocated before
+      // the def block (removing the vreg uses).
+      mayLiveIn(Reg);
+
       LiveReg &LR = reloadVirtReg(MI, I, Reg, CopyDstReg);
       MCPhysReg PhysReg = LR.PhysReg;
       CopySrcReg = (CopySrcReg == Reg || CopySrcReg == PhysReg) ? PhysReg : 0;
diff --git a/test/CodeGen/X86/regalloc-fast-missing-live-out-spill.mir b/test/CodeGen/X86/regalloc-fast-missing-live-out-spill.mir
new file mode 100644
index 00000000000..0fe9f60897f
--- /dev/null
+++ b/test/CodeGen/X86/regalloc-fast-missing-live-out-spill.mir
@@ -0,0 +1,66 @@
+# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
+# RUN: llc -mtriple=x86_64-grtev4-linux-gnu -run-pass=regallocfast -o - %s | FileCheck %s
+
+# Bug 41973. Make sure %12 is detected as live out of %bb.3, even
+# though the use is allocated before the def block %bb.3. Previously
+# mayLiveOut was only computed at defs, and would not find the virtual
+# register use if it had already been replaced with a physical
+# register.
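+#
+# A rough sketch of the scenario exercised below, assuming the fast
+# allocator visits blocks in layout order (%bb.0, %bb.1, %bb.2, %bb.3):
+# %12 is defined in %bb.3 but used in %bb.2, so its use block is
+# allocated before its def block. The CHECK lines therefore expect a
+# spill of %12 to %stack.1 in %bb.3 and a reload from %stack.1 in
+# %bb.2, which was missing before this change.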
+ +--- +name: main +tracksRegLiveness: true +body: | + ; CHECK-LABEL: name: main + ; CHECK: bb.0: + ; CHECK: successors: %bb.3(0x80000000) + ; CHECK: liveins: $edi, $rsi + ; CHECK: MOV64mr %stack.0, 1, $noreg, 0, $noreg, killed $rsi :: (store 8 into %stack.0) + ; CHECK: JMP_1 %bb.3 + ; CHECK: bb.1: + ; CHECK: successors: + ; CHECK: bb.2: + ; CHECK: successors: %bb.3(0x80000000) + ; CHECK: $rax = MOV64rm %stack.1, 1, $noreg, 0, $noreg :: (load 8 from %stack.1) + ; CHECK: renamable $ecx = MOV32r0 implicit-def $eflags + ; CHECK: renamable $rdx = SUBREG_TO_REG 0, killed renamable $ecx, %subreg.sub_32bit + ; CHECK: MOV64mi32 killed renamable $rax, 1, $noreg, 0, $noreg, 0 :: (volatile store 8) + ; CHECK: MOV64mr %stack.0, 1, $noreg, 0, $noreg, killed $rdx :: (store 8 into %stack.0) + ; CHECK: bb.3: + ; CHECK: successors: %bb.2(0x40000000), %bb.1(0x40000000) + ; CHECK: $rax = MOV64rm %stack.0, 1, $noreg, 0, $noreg :: (load 8 from %stack.0) + ; CHECK: renamable $ecx = MOV32r0 implicit-def dead $eflags + ; CHECK: renamable $rdx = SUBREG_TO_REG 0, killed renamable $ecx, %subreg.sub_32bit + ; CHECK: MOV64mr %stack.1, 1, $noreg, 0, $noreg, killed $rdx :: (store 8 into %stack.1) + ; CHECK: JMP64r killed renamable $rax + bb.0: + liveins: $edi, $rsi + + %4:gr64 = COPY $rsi + %2:gr32 = COPY $edi + %3:gr32 = COPY killed %2 + %5:gr64 = COPY killed %4 + %13:gr64 = COPY %5 + JMP_1 %bb.3 + + bb.1: + successors: + + + bb.2: + %0:gr64 = COPY %12 + %10:gr32 = MOV32r0 implicit-def $eflags + %11:gr64 = SUBREG_TO_REG 0, %10, %subreg.sub_32bit + MOV64mi32 %0, 1, $noreg, 0, $noreg, 0 :: (volatile store 8) + %13:gr64 = COPY %11 + + bb.3: + successors: %bb.2, %bb.1 + + %1:gr64 = COPY %13 + %9:gr32 = MOV32r0 implicit-def dead $eflags + %8:gr64 = SUBREG_TO_REG 0, killed %9, %subreg.sub_32bit + %12:gr64 = COPY %8 + JMP64r %1 + +...