
AArch64: Fix XSeqPairs/WSeqPairs problems

- Fix spills/reloads of XSeqPairs failing with virtual registers (only
  physical registers worked correctly)
- Add missing spill/reload code for the WSeqPairs class

Differential Revision: https://reviews.llvm.org/D52761

llvm-svn: 343799
Matthias Braun 2018-10-04 17:02:53 +00:00
parent c2106b631d
commit 1f941370f1
2 changed files with 110 additions and 18 deletions
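
The core of the change, as the diff below shows: the old spill/reload code called TRI->getSubReg() on the pair register, which only yields a register once a physical register has been assigned, so spilling a still-virtual XSeqPairs value did not work. The new storeRegPairToStackSlot/loadRegPairFromStackSlot helpers keep the sub-register index on the operand while the register is virtual and only split into physical sub-registers when one is available. A minimal sketch of the operand-building difference (illustrative only, not the exact code from the diff):

    // Virtual pair register: keep the sub-register index on the operand and
    // let the register allocator / rewriter substitute the physical sub-reg.
    MIB.addReg(SrcReg, getKillRegState(IsKill), AArch64::sube64);
    // Physical pair register: the sub-register can be taken immediately.
    MIB.addReg(TRI.getSubReg(SrcReg, AArch64::sube64), getKillRegState(IsKill));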


@@ -2744,6 +2744,29 @@ void AArch64InstrInfo::copyPhysReg(MachineBasicBlock &MBB,
  llvm_unreachable("unimplemented reg-to-reg copy");
}

static void storeRegPairToStackSlot(const TargetRegisterInfo &TRI,
                                    MachineBasicBlock &MBB,
                                    MachineBasicBlock::iterator InsertBefore,
                                    const MCInstrDesc &MCID,
                                    unsigned SrcReg, bool IsKill,
                                    unsigned SubIdx0, unsigned SubIdx1, int FI,
                                    MachineMemOperand *MMO) {
  unsigned SrcReg0 = SrcReg;
  unsigned SrcReg1 = SrcReg;
  if (TargetRegisterInfo::isPhysicalRegister(SrcReg)) {
    // A physical pair register can be split into its sub-registers right away;
    // a virtual register keeps the sub-register indices on the operands so the
    // register allocator can resolve them later.
    SrcReg0 = TRI.getSubReg(SrcReg, SubIdx0);
    SubIdx0 = 0;
    SrcReg1 = TRI.getSubReg(SrcReg, SubIdx1);
    SubIdx1 = 0;
  }
  BuildMI(MBB, InsertBefore, DebugLoc(), MCID)
      .addReg(SrcReg0, getKillRegState(IsKill), SubIdx0)
      .addReg(SrcReg1, getKillRegState(IsKill), SubIdx1)
      .addFrameIndex(FI)
      .addImm(0)
      .addMemOperand(MMO);
}

void AArch64InstrInfo::storeRegToStackSlot(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, unsigned SrcReg,
    bool isKill, int FI, const TargetRegisterClass *RC,
@@ -2786,8 +2809,14 @@ void AArch64InstrInfo::storeRegToStackSlot(
        MF.getRegInfo().constrainRegClass(SrcReg, &AArch64::GPR64RegClass);
      else
        assert(SrcReg != AArch64::SP);
-   } else if (AArch64::FPR64RegClass.hasSubClassEq(RC))
+   } else if (AArch64::FPR64RegClass.hasSubClassEq(RC)) {
      Opc = AArch64::STRDui;
+   } else if (AArch64::WSeqPairsClassRegClass.hasSubClassEq(RC)) {
+     storeRegPairToStackSlot(getRegisterInfo(), MBB, MBBI,
+                             get(AArch64::STPWi), SrcReg, isKill,
+                             AArch64::sube32, AArch64::subo32, FI, MMO);
+     return;
+   }
    break;
  case 16:
    if (AArch64::FPR128RegClass.hasSubClassEq(RC))
@@ -2797,14 +2826,9 @@ void AArch64InstrInfo::storeRegToStackSlot(
      Opc = AArch64::ST1Twov1d;
      Offset = false;
    } else if (AArch64::XSeqPairsClassRegClass.hasSubClassEq(RC)) {
-     BuildMI(MBB, MBBI, DL, get(AArch64::STPXi))
-         .addReg(TRI->getSubReg(SrcReg, AArch64::sube64),
-                 getKillRegState(isKill))
-         .addReg(TRI->getSubReg(SrcReg, AArch64::subo64),
-                 getKillRegState(isKill))
-         .addFrameIndex(FI)
-         .addImm(0)
-         .addMemOperand(MMO);
+     storeRegPairToStackSlot(getRegisterInfo(), MBB, MBBI,
+                             get(AArch64::STPXi), SrcReg, isKill,
+                             AArch64::sube64, AArch64::subo64, FI, MMO);
      return;
    }
    break;
@@ -2852,6 +2876,31 @@ void AArch64InstrInfo::storeRegToStackSlot(
  MI.addMemOperand(MMO);
}

static void loadRegPairFromStackSlot(const TargetRegisterInfo &TRI,
                                     MachineBasicBlock &MBB,
                                     MachineBasicBlock::iterator InsertBefore,
                                     const MCInstrDesc &MCID,
                                     unsigned DestReg, unsigned SubIdx0,
                                     unsigned SubIdx1, int FI,
                                     MachineMemOperand *MMO) {
  unsigned DestReg0 = DestReg;
  unsigned DestReg1 = DestReg;
  bool IsUndef = true;
  if (TargetRegisterInfo::isPhysicalRegister(DestReg)) {
    DestReg0 = TRI.getSubReg(DestReg, SubIdx0);
    SubIdx0 = 0;
    DestReg1 = TRI.getSubReg(DestReg, SubIdx1);
    SubIdx1 = 0;
    IsUndef = false;
  }
  // For a still-virtual destination each operand defines only a sub-register,
  // so mark the defs undef: the untouched lanes are not read-modify-written.
  BuildMI(MBB, InsertBefore, DebugLoc(), MCID)
      .addReg(DestReg0, RegState::Define | getUndefRegState(IsUndef), SubIdx0)
      .addReg(DestReg1, RegState::Define | getUndefRegState(IsUndef), SubIdx1)
      .addFrameIndex(FI)
      .addImm(0)
      .addMemOperand(MMO);
}

void AArch64InstrInfo::loadRegFromStackSlot(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, unsigned DestReg,
    int FI, const TargetRegisterClass *RC,
@@ -2894,8 +2943,14 @@ void AArch64InstrInfo::loadRegFromStackSlot(
        MF.getRegInfo().constrainRegClass(DestReg, &AArch64::GPR64RegClass);
      else
        assert(DestReg != AArch64::SP);
-   } else if (AArch64::FPR64RegClass.hasSubClassEq(RC))
+   } else if (AArch64::FPR64RegClass.hasSubClassEq(RC)) {
      Opc = AArch64::LDRDui;
+   } else if (AArch64::WSeqPairsClassRegClass.hasSubClassEq(RC)) {
+     loadRegPairFromStackSlot(getRegisterInfo(), MBB, MBBI,
+                              get(AArch64::LDPWi), DestReg, AArch64::sube32,
+                              AArch64::subo32, FI, MMO);
+     return;
+   }
    break;
  case 16:
    if (AArch64::FPR128RegClass.hasSubClassEq(RC))
@@ -2905,14 +2960,9 @@ void AArch64InstrInfo::loadRegFromStackSlot(
      Opc = AArch64::LD1Twov1d;
      Offset = false;
    } else if (AArch64::XSeqPairsClassRegClass.hasSubClassEq(RC)) {
-     BuildMI(MBB, MBBI, DL, get(AArch64::LDPXi))
-         .addReg(TRI->getSubReg(DestReg, AArch64::sube64),
-                 getDefRegState(true))
-         .addReg(TRI->getSubReg(DestReg, AArch64::subo64),
-                 getDefRegState(true))
-         .addFrameIndex(FI)
-         .addImm(0)
-         .addMemOperand(MMO);
+     loadRegPairFromStackSlot(getRegisterInfo(), MBB, MBBI,
+                              get(AArch64::LDPXi), DestReg, AArch64::sube64,
+                              AArch64::subo64, FI, MMO);
      return;
    }
    break;


@@ -0,0 +1,42 @@
# RUN: llc -o - %s -mtriple=aarch64-- -mattr=+v8.1a -run-pass=greedy,virtregrewriter | FileCheck %s
# Make sure spills/reloads from xseqpairs and wseqpairs work correctly.
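# Note: the inline asm below clobbers every general purpose register, which
# forces the greedy allocator to spill the pair-class vreg %0 across it; the
# virtregrewriter run then exercises the pair spill/reload code paths above.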
---
# CHECK-LABEL: name: spill_reload_xseqpairs
name: spill_reload_xseqpairs
body: |
  bb.0:
    ; Check the spill/reload sequence for the %0 register
    ; CHECK: renamable $[[REG0:[a-z0-9]+]]_[[REG1:[a-z0-9]+]] = CASPALX
    ; CHECK-NEXT: STPXi renamable $[[REG0]], renamable $[[REG1]], %stack.0, 0, implicit killed $[[REG0]]_[[REG1]] :: (store 16 into %stack.0, align 8)
    ; CHECK: INLINEASM
    ; CHECK: renamable $[[REG2:[a-z0-9]+]], renamable $[[REG3:[a-z0-9]+]] = LDPXi %stack.0, 0, implicit-def $[[REG2]]_[[REG3]] :: (load 16 from %stack.0, align 8)
    ; CHECK-NEXT: $xzr = COPY renamable $[[REG2]]
    ; CHECK-NEXT: $xzr = COPY renamable $[[REG3]]
    %0 : xseqpairsclass = IMPLICIT_DEF
    %1 : xseqpairsclass = IMPLICIT_DEF
    %2 : gpr64common = IMPLICIT_DEF
    %0 = CASPALX %0, %1, %2
    INLINEASM &" ", 0, 0, implicit def dead $x0, implicit def dead $x1, implicit def dead $x2, implicit def dead $x3, implicit def dead $x4, implicit def dead $x5, implicit def dead $x6, implicit def dead $x7, implicit def dead $x8, implicit def dead $x9, implicit def dead $x10, implicit def dead $x11, implicit def dead $x12, implicit def dead $x13, implicit def dead $x14, implicit def dead $x15, implicit def dead $x16, implicit def dead $x17, implicit def dead $x18, implicit def dead $x19, implicit def dead $x20, implicit def dead $x21, implicit def dead $x22, implicit def dead $x23, implicit def dead $x24, implicit def dead $x25, implicit def dead $x26, implicit def dead $x27, implicit def dead $x28, implicit def dead $fp, implicit def dead $lr
    $xzr = COPY %0.sube64
    $xzr = COPY %0.subo64
...
---
# CHECK-LABEL: name: spill_reload_wseqpairs
name: spill_reload_wseqpairs
body: |
  bb.0:
    ; Check the spill/reload sequence for the %0 register
    ; CHECK: $[[REG0:[a-z0-9]+]]_[[REG1:[a-z0-9]+]] = CASPALW
    ; CHECK-NEXT: STPWi renamable $[[REG0]], renamable $[[REG1]], %stack.0, 0, implicit killed $[[REG0]]_[[REG1]] :: (store 8 into %stack.0, align 4)
    ; CHECK: INLINEASM
    ; CHECK: renamable $[[REG2:[a-z0-9]+]], renamable $[[REG3:[a-z0-9]+]] = LDPWi %stack.0, 0, implicit-def $[[REG2]]_[[REG3]] :: (load 8 from %stack.0, align 4)
    ; CHECK-NEXT: $xzr = COPY renamable $[[REG2]]
    ; CHECK-NEXT: $xzr = COPY renamable $[[REG3]]
    %0 : wseqpairsclass = IMPLICIT_DEF
    %1 : wseqpairsclass = IMPLICIT_DEF
    %2 : gpr64common = IMPLICIT_DEF
    %0 = CASPALW %0, %1, %2
    INLINEASM &" ", 0, 0, implicit def dead $x0, implicit def dead $x1, implicit def dead $x2, implicit def dead $x3, implicit def dead $x4, implicit def dead $x5, implicit def dead $x6, implicit def dead $x7, implicit def dead $x8, implicit def dead $x9, implicit def dead $x10, implicit def dead $x11, implicit def dead $x12, implicit def dead $x13, implicit def dead $x14, implicit def dead $x15, implicit def dead $x16, implicit def dead $x17, implicit def dead $x18, implicit def dead $x19, implicit def dead $x20, implicit def dead $x21, implicit def dead $x22, implicit def dead $x23, implicit def dead $x24, implicit def dead $x25, implicit def dead $x26, implicit def dead $x27, implicit def dead $x28, implicit def dead $fp, implicit def dead $lr
    $xzr = COPY %0.sube32
    $xzr = COPY %0.subo32
...