
swifterror: Don't compute swifterror vregs during instruction selection

The code used LLVM basic block predecessors to decide where to insert phi
nodes. Instruction selection can and will liberally insert new machine basic
block predecessors, so there is no guaranteed one-to-one mapping between
predecessor LLVM basic blocks and machine basic blocks.

Therefore, the current approach does not work: it assumes we can mark a
predecessor machine basic block as needing a copy, and it needs to know the set
of all predecessor machine basic blocks to decide when to insert phis.

Instead of computing the swifterror vregs as we select instructions, propagate
them at the end of instruction selection when the MBB CFG is complete.

When an instruction needs a swifterror vreg and we don't know the value yet,
generate a new vreg, remember this "upward exposed" use, and reconcile it at
the end of instruction selection.

This will only happen if the target supports promoting swifterror parameters to
registers and the swifterror attribute is used.
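
For illustration only, here is a minimal standalone model of the bookkeeping described above, with basic blocks, values, and vregs shrunk to plain ints. The two maps and the get-or-create behavior mirror SwiftErrorVRegDefMap, SwiftErrorVRegUpwardsUse, and getOrCreateSwiftErrorVReg from the patch below; everything else is a hypothetical simplification, not the patch's code:

    // Standalone model; all names and types here are simplifications.
    #include <cstdio>
    #include <map>
    #include <utility>

    using Block = int;
    using Value = int;
    using Reg = int;

    std::map<std::pair<Block, Value>, Reg> VRegDefMap;     // current def per (block, value)
    std::map<std::pair<Block, Value>, Reg> VRegUpwardsUse; // uses seen before any def
    Reg NextReg = 1;

    // Mirrors getOrCreateSwiftErrorVReg: the first use in a block that has no
    // known def gets a fresh vreg, recorded as an "upward exposed" use to be
    // satisfied by a copy or phi once the MBB CFG is final.
    Reg getOrCreate(Block BB, Value Val) {
      auto Key = std::make_pair(BB, Val);
      auto It = VRegDefMap.find(Key);
      if (It != VRegDefMap.end())
        return It->second;
      Reg R = NextReg++;
      VRegDefMap[Key] = R;
      VRegUpwardsUse[Key] = R;
      return R;
    }

    int main() {
      Reg R = getOrCreate(/*BB=*/2, /*Val=*/0);
      std::printf("vreg %d is upward exposed in bb2 (%zu pending)\n", R,
                  VRegUpwardsUse.size());
    }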

rdar://28300923

llvm-svn: 283617
Arnold Schwaighofer 2016-10-07 22:06:55 +00:00
parent 1d88c15c1d
commit 6fad756c74
9 changed files with 292 additions and 222 deletions

include/llvm/CodeGen/FunctionLoweringInfo.h

@@ -72,36 +72,36 @@ public:
   /// MBBMap - A mapping from LLVM basic blocks to their machine code entry.
   DenseMap<const BasicBlock*, MachineBasicBlock *> MBBMap;
 
-  typedef SmallVector<unsigned, 1> SwiftErrorVRegs;
+  /// A map from swifterror value in a basic block to the virtual register it is
+  /// currently represented by.
+  llvm::DenseMap<std::pair<const MachineBasicBlock *, const Value *>, unsigned>
+      SwiftErrorVRegDefMap;
+
+  /// A list of upward exposed vreg uses that need to be satisfied by either a
+  /// copy def or a phi node at the beginning of the basic block representing
+  /// the predecessor(s) swifterror value.
+  llvm::DenseMap<std::pair<const MachineBasicBlock *, const Value *>, unsigned>
+      SwiftErrorVRegUpwardsUse;
+
+  /// The swifterror argument of the current function.
+  const Value *SwiftErrorArg;
+
   typedef SmallVector<const Value*, 1> SwiftErrorValues;
   /// A function can only have a single swifterror argument. And if it does
   /// have a swifterror argument, it must be the first entry in
   /// SwiftErrorVals.
   SwiftErrorValues SwiftErrorVals;
-  /// Track the virtual register for each swifterror value in a given basic
-  /// block. Entries in SwiftErrorVRegs have the same ordering as entries
-  /// in SwiftErrorVals.
-  /// Note that another choice that is more straight-forward is to use
-  /// Map<const MachineBasicBlock*, Map<Value*, unsigned/*VReg*/>>. It
-  /// maintains a map from swifterror values to virtual registers for each
-  /// machine basic block. This choice does not require a one-to-one
-  /// correspondence between SwiftErrorValues and SwiftErrorVRegs. But because
-  /// of efficiency concern, we do not choose it.
-  llvm::DenseMap<const MachineBasicBlock*, SwiftErrorVRegs> SwiftErrorMap;
-  /// Track the virtual register for each swifterror value at the end of a basic
-  /// block when we need the assignment of a virtual register before the basic
-  /// block is visited. When we actually visit the basic block, we will make
-  /// sure the swifterror value is in the correct virtual register.
-  llvm::DenseMap<const MachineBasicBlock*, SwiftErrorVRegs>
-      SwiftErrorWorklist;
-  /// Find the swifterror virtual register in SwiftErrorMap. We will assert
-  /// failure when the value does not exist in swifterror map.
-  unsigned findSwiftErrorVReg(const MachineBasicBlock*, const Value*) const;
-  /// Set the swifterror virtual register in SwiftErrorMap.
-  void setSwiftErrorVReg(const MachineBasicBlock *MBB, const Value*, unsigned);
+
+  /// Get or create the swifterror value virtual register in
+  /// SwiftErrorVRegDefMap for this basic block.
+  unsigned getOrCreateSwiftErrorVReg(const MachineBasicBlock *,
+                                     const Value *);
+
+  /// Set the swifterror virtual register in the SwiftErrorVRegDefMap for this
+  /// basic block.
+  void setCurrentSwiftErrorVReg(const MachineBasicBlock *MBB, const Value *,
+                                unsigned);
 
   /// ValueMap - Since we emit code for the function a basic block at a time,
   /// we must remember which virtual registers hold the values for

lib/CodeGen/SelectionDAG/FastISel.cpp

@@ -1315,15 +1315,6 @@ bool FastISel::selectBitCast(const User *I) {
   return true;
 }
 
-// Return true if we should copy from swift error to the final vreg as specified
-// by SwiftErrorWorklist.
-static bool shouldCopySwiftErrorsToFinalVRegs(const TargetLowering &TLI,
-                                              FunctionLoweringInfo &FuncInfo) {
-  if (!TLI.supportSwiftError())
-    return false;
-  return FuncInfo.SwiftErrorWorklist.count(FuncInfo.MBB);
-}
-
 // Remove local value instructions starting from the instruction after
 // SavedLastLocalValue to the current function insert point.
 void FastISel::removeDeadLocalValueCode(MachineInstr *SavedLastLocalValue)
@@ -1348,10 +1339,6 @@ bool FastISel::selectInstruction(const Instruction *I) {
   // Just before the terminator instruction, insert instructions to
   // feed PHI nodes in successor blocks.
   if (isa<TerminatorInst>(I)) {
-    // If we need to materialize any vreg from worklist, we bail out of
-    // FastISel.
-    if (shouldCopySwiftErrorsToFinalVRegs(TLI, FuncInfo))
-      return false;
     if (!handlePHINodesInSuccessorBlocks(I->getParent())) {
       // PHI node handling may have generated local value instructions,
       // even though it failed to handle all PHI nodes.

lib/CodeGen/SelectionDAG/FunctionLoweringInfo.cpp

@@ -596,18 +596,26 @@ void llvm::AddLandingPadInfo(const LandingPadInst &I, MachineModuleInfo &MMI,
   }
 }
 
-unsigned FunctionLoweringInfo::findSwiftErrorVReg(const MachineBasicBlock *MBB,
-                                                  const Value* Val) const {
-  // Find the index in SwiftErrorVals.
-  SwiftErrorValues::const_iterator I = find(SwiftErrorVals, Val);
-  assert(I != SwiftErrorVals.end() && "Can't find value in SwiftErrorVals");
-  return SwiftErrorMap.lookup(MBB)[I - SwiftErrorVals.begin()];
+unsigned
+FunctionLoweringInfo::getOrCreateSwiftErrorVReg(const MachineBasicBlock *MBB,
+                                                const Value *Val) {
+  auto Key = std::make_pair(MBB, Val);
+  auto It = SwiftErrorVRegDefMap.find(Key);
+  // If this is the first use of this swifterror value in this basic block,
+  // create a new virtual register.
+  // After we processed all basic blocks we will satisfy this "upwards exposed
+  // use" by inserting a copy or phi at the beginning of this block.
+  if (It == SwiftErrorVRegDefMap.end()) {
+    auto &DL = MF->getDataLayout();
+    const TargetRegisterClass *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
+    auto VReg = MF->getRegInfo().createVirtualRegister(RC);
+    SwiftErrorVRegDefMap[Key] = VReg;
+    SwiftErrorVRegUpwardsUse[Key] = VReg;
+    return VReg;
+  } else return It->second;
 }
 
-void FunctionLoweringInfo::setSwiftErrorVReg(const MachineBasicBlock *MBB,
-                                             const Value* Val, unsigned VReg) {
-  // Find the index in SwiftErrorVals.
-  SwiftErrorValues::iterator I = find(SwiftErrorVals, Val);
-  assert(I != SwiftErrorVals.end() && "Can't find value in SwiftErrorVals");
-  SwiftErrorMap[MBB][I - SwiftErrorVals.begin()] = VReg;
+void FunctionLoweringInfo::setCurrentSwiftErrorVReg(
+    const MachineBasicBlock *MBB, const Value *Val, unsigned VReg) {
+  SwiftErrorVRegDefMap[std::make_pair(MBB, Val)] = VReg;
 }

lib/CodeGen/SelectionDAG/SelectionDAGBuilder.cpp

@@ -930,46 +930,9 @@ SDValue SelectionDAGBuilder::getControlRoot() {
   return Root;
 }
 
-/// Copy swift error to the final virtual register at end of a basic block, as
-/// specified by SwiftErrorWorklist, if necessary.
-static void copySwiftErrorsToFinalVRegs(SelectionDAGBuilder &SDB) {
-  const TargetLowering &TLI = SDB.DAG.getTargetLoweringInfo();
-  if (!TLI.supportSwiftError())
-    return;
-
-  if (!SDB.FuncInfo.SwiftErrorWorklist.count(SDB.FuncInfo.MBB))
-    return;
-
-  // Go through entries in SwiftErrorWorklist, and create copy as necessary.
-  FunctionLoweringInfo::SwiftErrorVRegs &WorklistEntry =
-      SDB.FuncInfo.SwiftErrorWorklist[SDB.FuncInfo.MBB];
-  FunctionLoweringInfo::SwiftErrorVRegs &MapEntry =
-      SDB.FuncInfo.SwiftErrorMap[SDB.FuncInfo.MBB];
-  for (unsigned I = 0, E = WorklistEntry.size(); I < E; I++) {
-    unsigned WorkReg = WorklistEntry[I];
-
-    // Find the swifterror virtual register for the value in SwiftErrorMap.
-    unsigned MapReg = MapEntry[I];
-    assert(TargetRegisterInfo::isVirtualRegister(MapReg) &&
-           "Entries in SwiftErrorMap should be virtual registers");
-
-    if (WorkReg == MapReg)
-      continue;
-
-    // Create copy from SwiftErrorMap to SwiftWorklist.
-    auto &DL = SDB.DAG.getDataLayout();
-    SDValue CopyNode = SDB.DAG.getCopyToReg(
-        SDB.getRoot(), SDB.getCurSDLoc(), WorkReg,
-        SDB.DAG.getRegister(MapReg, EVT(TLI.getPointerTy(DL))));
-    MapEntry[I] = WorkReg;
-    SDB.DAG.setRoot(CopyNode);
-  }
-}
-
 void SelectionDAGBuilder::visit(const Instruction &I) {
   // Set up outgoing PHI node register values before emitting the terminator.
   if (isa<TerminatorInst>(&I)) {
-    copySwiftErrorsToFinalVRegs(*this);
     HandlePHINodesInSuccessorBlocks(I.getParent());
   }
@@ -1489,6 +1452,7 @@ void SelectionDAGBuilder::visitRet(const ReturnInst &I) {
   const Function *F = I.getParent()->getParent();
   if (TLI.supportSwiftError() &&
       F->getAttributes().hasAttrSomewhere(Attribute::SwiftError)) {
+    assert(FuncInfo.SwiftErrorArg && "Need a swift error argument");
     ISD::ArgFlagsTy Flags = ISD::ArgFlagsTy();
     Flags.setSwiftError();
     Outs.push_back(ISD::OutputArg(Flags, EVT(TLI.getPointerTy(DL)) /*vt*/,
@@ -1496,7 +1460,8 @@ void SelectionDAGBuilder::visitRet(const ReturnInst &I) {
                                   true /*isfixed*/, 1 /*origidx*/,
                                   0 /*partOffs*/));
     // Create SDNode for the swifterror virtual register.
-    OutVals.push_back(DAG.getRegister(FuncInfo.SwiftErrorMap[FuncInfo.MBB][0],
-                                      EVT(TLI.getPointerTy(DL))));
+    OutVals.push_back(DAG.getRegister(FuncInfo.getOrCreateSwiftErrorVReg(
+                                          FuncInfo.MBB, FuncInfo.SwiftErrorArg),
+                                      EVT(TLI.getPointerTy(DL))));
   }
@@ -3590,7 +3555,7 @@ void SelectionDAGBuilder::visitStoreToSwiftError(const StoreInst &I) {
   SDValue CopyNode = DAG.getCopyToReg(getRoot(), getCurSDLoc(), VReg,
                                       SDValue(Src.getNode(), Src.getResNo()));
   DAG.setRoot(CopyNode);
-  FuncInfo.setSwiftErrorVReg(FuncInfo.MBB, I.getOperand(1), VReg);
+  FuncInfo.setCurrentSwiftErrorVReg(FuncInfo.MBB, I.getOperand(1), VReg);
 }
 
 void SelectionDAGBuilder::visitLoadFromSwiftError(const LoadInst &I) {
@@ -3618,9 +3583,9 @@ void SelectionDAGBuilder::visitLoadFromSwiftError(const LoadInst &I) {
          "expect a single EVT for swifterror");
 
   // Chain, DL, Reg, VT, Glue or Chain, DL, Reg, VT
-  SDValue L = DAG.getCopyFromReg(getRoot(), getCurSDLoc(),
-                                 FuncInfo.findSwiftErrorVReg(FuncInfo.MBB, SV),
-                                 ValueVTs[0]);
+  SDValue L = DAG.getCopyFromReg(
+      getRoot(), getCurSDLoc(),
+      FuncInfo.getOrCreateSwiftErrorVReg(FuncInfo.MBB, SV), ValueVTs[0]);
 
   setValue(&I, L);
 }
@@ -5815,9 +5780,9 @@ void SelectionDAGBuilder::LowerCallTo(ImmutableCallSite CS, SDValue Callee,
       SwiftErrorVal = V;
       // We find the virtual register for the actual swifterror argument.
       // Instead of using the Value, we use the virtual register instead.
-      Entry.Node = DAG.getRegister(
-          FuncInfo.findSwiftErrorVReg(FuncInfo.MBB, V),
-          EVT(TLI.getPointerTy(DL)));
+      Entry.Node =
+          DAG.getRegister(FuncInfo.getOrCreateSwiftErrorVReg(FuncInfo.MBB, V),
+                          EVT(TLI.getPointerTy(DL)));
     }
 
     Args.push_back(Entry);
@@ -5862,7 +5827,7 @@ void SelectionDAGBuilder::LowerCallTo(ImmutableCallSite CS, SDValue Callee,
     unsigned VReg = FuncInfo.MF->getRegInfo().createVirtualRegister(RC);
     SDValue CopyNode = CLI.DAG.getCopyToReg(Result.second, CLI.DL, VReg, Src);
     // We update the virtual register for the actual swifterror argument.
-    FuncInfo.setSwiftErrorVReg(FuncInfo.MBB, SwiftErrorVal, VReg);
+    FuncInfo.setCurrentSwiftErrorVReg(FuncInfo.MBB, SwiftErrorVal, VReg);
     DAG.setRoot(CopyNode);
   }
 }
@@ -8119,7 +8084,10 @@ void SelectionDAGISel::LowerArguments(const Function &F) {
 
     // If this argument is unused then remember its value. It is used to generate
    // debugging information.
-    if (I->use_empty() && NumValues) {
+    bool isSwiftErrorArg =
+        TLI->supportSwiftError() &&
+        F.getAttributes().hasAttribute(Idx, Attribute::SwiftError);
+    if (I->use_empty() && NumValues && !isSwiftErrorArg) {
       SDB->setUnusedArgValue(&*I, InVals[i]);
 
       // Also remember any frame index for use in FastISel.
@@ -8133,7 +8101,10 @@ void SelectionDAGISel::LowerArguments(const Function &F) {
       MVT PartVT = TLI->getRegisterType(*CurDAG->getContext(), VT);
       unsigned NumParts = TLI->getNumRegisters(*CurDAG->getContext(), VT);
 
-      if (!I->use_empty()) {
+      // Even an apparent 'unused' swifterror argument needs to be returned. So
+      // we do generate a copy for it that can be used on return from the
+      // function.
+      if (!I->use_empty() || isSwiftErrorArg) {
         Optional<ISD::NodeType> AssertOp;
         if (F.getAttributes().hasAttribute(Idx, Attribute::SExt))
           AssertOp = ISD::AssertSext;
@@ -8169,12 +8140,12 @@ void SelectionDAGISel::LowerArguments(const Function &F) {
       FuncInfo->setArgumentFrameIndex(&*I, FI->getIndex());
     }
 
-    // Update SwiftErrorMap.
-    if (Res.getOpcode() == ISD::CopyFromReg && TLI->supportSwiftError() &&
-        F.getAttributes().hasAttribute(Idx, Attribute::SwiftError)) {
+    // Update the SwiftErrorVRegDefMap.
+    if (Res.getOpcode() == ISD::CopyFromReg && isSwiftErrorArg) {
       unsigned Reg = cast<RegisterSDNode>(Res.getOperand(1))->getReg();
       if (TargetRegisterInfo::isVirtualRegister(Reg))
-        FuncInfo->SwiftErrorMap[FuncInfo->MBB][0] = Reg;
+        FuncInfo->setCurrentSwiftErrorVReg(FuncInfo->MBB,
+                                           FuncInfo->SwiftErrorArg, Reg);
     }
 
     // If this argument is live outside of the entry block, insert a copy from

lib/CodeGen/SelectionDAG/SelectionDAGISel.cpp

@@ -1157,14 +1157,21 @@ static void setupSwiftErrorVals(const Function &Fn, const TargetLowering *TLI,
     return;
 
   FuncInfo->SwiftErrorVals.clear();
-  FuncInfo->SwiftErrorMap.clear();
-  FuncInfo->SwiftErrorWorklist.clear();
+  FuncInfo->SwiftErrorVRegDefMap.clear();
+  FuncInfo->SwiftErrorVRegUpwardsUse.clear();
+  FuncInfo->SwiftErrorArg = nullptr;
 
   // Check if function has a swifterror argument.
+  bool HaveSeenSwiftErrorArg = false;
   for (Function::const_arg_iterator AI = Fn.arg_begin(), AE = Fn.arg_end();
        AI != AE; ++AI)
-    if (AI->hasSwiftErrorAttr())
+    if (AI->hasSwiftErrorAttr()) {
+      assert(!HaveSeenSwiftErrorArg &&
+             "Must have only one swifterror parameter");
+      HaveSeenSwiftErrorArg = true;
+      FuncInfo->SwiftErrorArg = &*AI;
       FuncInfo->SwiftErrorVals.push_back(&*AI);
+    }
 
   for (const auto &LLVMBB : Fn)
     for (const auto &Inst : LLVMBB) {
@@ -1174,95 +1181,152 @@ static void setupSwiftErrorVals(const Function &Fn, const TargetLowering *TLI,
   }
 }
 
-/// For each basic block, merge incoming swifterror values or simply propagate
-/// them. The merged results will be saved in SwiftErrorMap. For predecessors
-/// that are not yet visited, we create virtual registers to hold the swifterror
-/// values and save them in SwiftErrorWorklist.
-static void mergeIncomingSwiftErrors(FunctionLoweringInfo *FuncInfo,
-                                     const TargetLowering *TLI,
-                                     const TargetInstrInfo *TII,
-                                     const BasicBlock *LLVMBB,
-                                     SelectionDAGBuilder *SDB) {
+static void createSwiftErrorEntriesInEntryBlock(FunctionLoweringInfo *FuncInfo,
+                                                const TargetLowering *TLI,
+                                                const TargetInstrInfo *TII,
+                                                const BasicBlock *LLVMBB,
+                                                SelectionDAGBuilder *SDB) {
   if (!TLI->supportSwiftError())
     return;
 
-  // We should only do this when we have swifterror parameter or swifterror
+  // We only need to do this when we have swifterror parameter or swifterror
   // alloc.
   if (FuncInfo->SwiftErrorVals.empty())
     return;
 
-  // At beginning of a basic block, insert PHI nodes or get the virtual
-  // register from the only predecessor, and update SwiftErrorMap; if one
-  // of the predecessors is not visited, update SwiftErrorWorklist.
-  // At end of a basic block, if a block is in SwiftErrorWorklist, insert copy
-  // to sync up the virtual register assignment.
-
-  // Always create a virtual register for each swifterror value in entry block.
-  auto &DL = SDB->DAG.getDataLayout();
-  const TargetRegisterClass *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
   if (pred_begin(LLVMBB) == pred_end(LLVMBB)) {
-    for (unsigned I = 0, E = FuncInfo->SwiftErrorVals.size(); I < E; I++) {
+    auto &DL = FuncInfo->MF->getDataLayout();
+    auto const *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
+    for (const auto *SwiftErrorVal : FuncInfo->SwiftErrorVals) {
+      // We will always generate a copy from the argument. It is always used at
+      // least by the 'return' of the swifterror.
+      if (FuncInfo->SwiftErrorArg && FuncInfo->SwiftErrorArg == SwiftErrorVal)
+        continue;
       unsigned VReg = FuncInfo->MF->getRegInfo().createVirtualRegister(RC);
       // Assign Undef to Vreg. We construct MI directly to make sure it works
       // with FastISel.
-      BuildMI(*FuncInfo->MBB, FuncInfo->InsertPt, SDB->getCurDebugLoc(),
-              TII->get(TargetOpcode::IMPLICIT_DEF), VReg);
-      FuncInfo->SwiftErrorMap[FuncInfo->MBB].push_back(VReg);
+      BuildMI(*FuncInfo->MBB, FuncInfo->MBB->getFirstNonPHI(),
+              SDB->getCurDebugLoc(), TII->get(TargetOpcode::IMPLICIT_DEF),
+              VReg);
+      FuncInfo->setCurrentSwiftErrorVReg(FuncInfo->MBB, SwiftErrorVal, VReg);
     }
+  }
+}
+
+/// Propagate swifterror values through the machine function CFG.
+static void propagateSwiftErrorVRegs(FunctionLoweringInfo *FuncInfo) {
+  auto *TLI = FuncInfo->TLI;
+  if (!TLI->supportSwiftError())
     return;
-  }
 
-  if (auto *UniquePred = LLVMBB->getUniquePredecessor()) {
-    auto *UniquePredMBB = FuncInfo->MBBMap[UniquePred];
-    if (!FuncInfo->SwiftErrorMap.count(UniquePredMBB)) {
-      // Update SwiftErrorWorklist with a new virtual register.
-      for (unsigned I = 0, E = FuncInfo->SwiftErrorVals.size(); I < E; I++) {
-        unsigned VReg = FuncInfo->MF->getRegInfo().createVirtualRegister(RC);
-        FuncInfo->SwiftErrorWorklist[UniquePredMBB].push_back(VReg);
-        // Propagate the information from the single predecessor.
-        FuncInfo->SwiftErrorMap[FuncInfo->MBB].push_back(VReg);
-      }
-      return;
-    }
-    // Propagate the information from the single predecessor.
-    FuncInfo->SwiftErrorMap[FuncInfo->MBB] =
-        FuncInfo->SwiftErrorMap[UniquePredMBB];
+  // We only need to do this when we have swifterror parameter or swifterror
+  // alloc.
+  if (FuncInfo->SwiftErrorVals.empty())
     return;
-  }
 
-  // For the case of multiple predecessors, update SwiftErrorWorklist.
-  // Handle the case where we have two or more predecessors being the same.
-  for (const_pred_iterator PI = pred_begin(LLVMBB), PE = pred_end(LLVMBB);
-       PI != PE; ++PI) {
-    auto *PredMBB = FuncInfo->MBBMap[*PI];
-    if (!FuncInfo->SwiftErrorMap.count(PredMBB) &&
-        !FuncInfo->SwiftErrorWorklist.count(PredMBB)) {
-      for (unsigned I = 0, E = FuncInfo->SwiftErrorVals.size(); I < E; I++) {
-        unsigned VReg = FuncInfo->MF->getRegInfo().createVirtualRegister(RC);
-        // When we actually visit the basic block PredMBB, we will materialize
-        // the virtual register assignment in copySwiftErrorsToFinalVRegs.
-        FuncInfo->SwiftErrorWorklist[PredMBB].push_back(VReg);
-      }
-    }
-  }
+  // For each machine basic block in reverse post order.
+  ReversePostOrderTraversal<MachineFunction *> RPOT(FuncInfo->MF);
+  for (ReversePostOrderTraversal<MachineFunction *>::rpo_iterator
+           It = RPOT.begin(),
+           E = RPOT.end();
+       It != E; ++It) {
+    MachineBasicBlock *MBB = *It;
 
-  // For the case of multiple predecessors, create a virtual register for
-  // each swifterror value and generate Phi node.
-  for (unsigned I = 0, E = FuncInfo->SwiftErrorVals.size(); I < E; I++) {
-    unsigned VReg = FuncInfo->MF->getRegInfo().createVirtualRegister(RC);
-    FuncInfo->SwiftErrorMap[FuncInfo->MBB].push_back(VReg);
+    // For each swifterror value in the function.
+    for (const auto *SwiftErrorVal : FuncInfo->SwiftErrorVals) {
+      auto Key = std::make_pair(MBB, SwiftErrorVal);
+      auto UUseIt = FuncInfo->SwiftErrorVRegUpwardsUse.find(Key);
+      auto VRegDefIt = FuncInfo->SwiftErrorVRegDefMap.find(Key);
+      bool UpwardsUse = UUseIt != FuncInfo->SwiftErrorVRegUpwardsUse.end();
+      unsigned UUseVReg = UpwardsUse ? UUseIt->second : 0;
+      bool DownwardDef = VRegDefIt != FuncInfo->SwiftErrorVRegDefMap.end();
+      assert(!(UpwardsUse && !DownwardDef) &&
+             "We can't have an upwards use but no downwards def");
 
-    MachineInstrBuilder SwiftErrorPHI = BuildMI(*FuncInfo->MBB,
-        FuncInfo->InsertPt, SDB->getCurDebugLoc(),
-        TII->get(TargetOpcode::PHI), VReg);
-    for (const_pred_iterator PI = pred_begin(LLVMBB), PE = pred_end(LLVMBB);
-         PI != PE; ++PI) {
-      auto *PredMBB = FuncInfo->MBBMap[*PI];
-      unsigned SwiftErrorReg = FuncInfo->SwiftErrorMap.count(PredMBB) ?
-        FuncInfo->SwiftErrorMap[PredMBB][I] :
-        FuncInfo->SwiftErrorWorklist[PredMBB][I];
-      SwiftErrorPHI.addReg(SwiftErrorReg)
-        .addMBB(PredMBB);
+      // If there is no upwards exposed use and an entry for the swifterror in
+      // the def map for this value we don't need to do anything: We already
+      // have a downward def for this basic block.
+      if (!UpwardsUse && DownwardDef)
+        continue;
+
+      // Otherwise we either have an upwards exposed use vreg that we need to
+      // materialize or need to forward the downward def from predecessors.
+
+      // Check whether we have a single vreg def from all predecessors.
+      // Otherwise we need a phi.
+      SmallVector<std::pair<MachineBasicBlock *, unsigned>, 4> VRegs;
+      SmallSet<const MachineBasicBlock*, 8> Visited;
+      for (auto *Pred : MBB->predecessors()) {
+        if (!Visited.insert(Pred).second)
+          continue;
+        VRegs.push_back(std::make_pair(
+            Pred, FuncInfo->getOrCreateSwiftErrorVReg(Pred, SwiftErrorVal)));
+        if (Pred != MBB)
+          continue;
+        // We have a self-edge.
+        // If there was no upwards use in this basic block there is now one:
+        // the phi needs to use itself.
+        if (!UpwardsUse) {
+          UpwardsUse = true;
+          UUseIt = FuncInfo->SwiftErrorVRegUpwardsUse.find(Key);
+          assert(UUseIt != FuncInfo->SwiftErrorVRegUpwardsUse.end());
+          UUseVReg = UUseIt->second;
+        }
+      }
+
+      // We need a phi node if we have more than one predecessor with different
+      // downward defs.
+      bool needPHI =
+          VRegs.size() >= 1 &&
+          std::find_if(
+              VRegs.begin(), VRegs.end(),
+              [&](const std::pair<const MachineBasicBlock *, unsigned> &V)
+                  -> bool { return V.second != VRegs[0].second; }) !=
+              VRegs.end();
+
+      // If there is no upwards exposed use and we don't need a phi just
+      // forward the swifterror vreg from the predecessor(s).
+      if (!UpwardsUse && !needPHI) {
+        assert(!VRegs.empty() &&
+               "No predecessors? The entry block should bail out earlier");
+        // Just forward the swifterror vreg from the predecessor(s).
+        FuncInfo->setCurrentSwiftErrorVReg(MBB, SwiftErrorVal, VRegs[0].second);
+        continue;
+      }
+
+      auto DLoc = isa<Instruction>(SwiftErrorVal)
+                      ? dyn_cast<Instruction>(SwiftErrorVal)->getDebugLoc()
+                      : DebugLoc();
+      const auto *TII = FuncInfo->MF->getSubtarget().getInstrInfo();
+
+      // If we don't need a phi create a copy to the upward exposed vreg.
+      if (!needPHI) {
+        assert(UpwardsUse);
+        unsigned DestReg = UUseVReg;
+        BuildMI(*MBB, MBB->getFirstNonPHI(), DLoc, TII->get(TargetOpcode::COPY),
+                DestReg)
+            .addReg(VRegs[0].second);
+        continue;
+      }
+
+      // We need a phi: if there is an upwards exposed use we already have a
+      // destination virtual register number otherwise we generate a new one.
+      auto &DL = FuncInfo->MF->getDataLayout();
+      auto const *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
+      unsigned PHIVReg =
+          UpwardsUse ? UUseVReg
+                     : FuncInfo->MF->getRegInfo().createVirtualRegister(RC);
+      MachineInstrBuilder SwiftErrorPHI =
+          BuildMI(*MBB, MBB->getFirstNonPHI(), DLoc,
+                  TII->get(TargetOpcode::PHI), PHIVReg);
+      for (auto BBRegPair : VRegs) {
+        SwiftErrorPHI.addReg(BBRegPair.second).addMBB(BBRegPair.first);
+      }
+
+      // We did not have a definition in this block before: store the phi's
+      // vreg as this block's downward exposed def.
+      if (!UpwardsUse)
+        FuncInfo->setCurrentSwiftErrorVReg(MBB, SwiftErrorVal, PHIVReg);
     }
   }
 }
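
As a quick sanity check of the three cases handled above (forward, copy, phi), here is a tiny standalone sketch of the per-block decision, with predecessor vregs reduced to plain ints. This is a simplified model under assumed inputs, not the patch's code:

    // Simplified model of propagateSwiftErrorVRegs' per-block decision;
    // names here are hypothetical.
    #include <algorithm>
    #include <cstdio>
    #include <vector>

    enum class Fix { Forward, Copy, Phi };

    Fix decide(const std::vector<int> &PredVRegs, bool UpwardsUse) {
      // A phi is needed when predecessors disagree on the defining vreg.
      bool NeedPHI =
          !PredVRegs.empty() &&
          std::find_if(PredVRegs.begin(), PredVRegs.end(),
                       [&](int R) { return R != PredVRegs.front(); }) !=
              PredVRegs.end();
      if (!UpwardsUse && !NeedPHI)
        return Fix::Forward; // reuse the predecessors' common vreg as this def
      if (!NeedPHI)
        return Fix::Copy; // copy the single incoming vreg into the upward use
      return Fix::Phi;    // join differing defs; the phi defines this block's vreg
    }

    int main() {
      std::printf("%d %d %d\n",
                  static_cast<int>(decide({7, 7}, false)),  // Forward
                  static_cast<int>(decide({7}, true)),      // Copy
                  static_cast<int>(decide({7, 9}, false))); // Phi
    }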
@@ -1313,7 +1377,7 @@ void SelectionDAGISel::SelectAllBasicBlocks(const Function &Fn) {
     if (!FuncInfo->MBB)
       continue; // Some blocks like catchpads have no code or MBB.
     FuncInfo->InsertPt = FuncInfo->MBB->getFirstNonPHI();
-    mergeIncomingSwiftErrors(FuncInfo, TLI, TII, LLVMBB, SDB);
+    createSwiftErrorEntriesInEntryBlock(FuncInfo, TLI, TII, LLVMBB, SDB);
 
     // Setup an EH landing-pad block.
     FuncInfo->ExceptionPointerVirtReg = 0;
@@ -1490,6 +1554,8 @@ void SelectionDAGISel::SelectAllBasicBlocks(const Function &Fn) {
     FuncInfo->PHINodesToUpdate.clear();
   }
 
+  propagateSwiftErrorVRegs(FuncInfo);
+
   delete FastIS;
   SDB->clearDanglingDebugInfo();
   SDB->SPDescriptor.resetPerFunctionState();

test/CodeGen/AArch64/swifterror.ll

@@ -19,10 +19,11 @@ define float @foo(%swift_error** swifterror %error_ptr_ref) {
 ; CHECK-O0-LABEL: foo:
 ; CHECK-O0: orr w{{.*}}, wzr, #0x10
 ; CHECK-O0: malloc
-; CHECK-O0: mov [[ID2:x[0-9]+]], x0
+; CHECK-O0: mov x19, x0
+; CHECK-O0-NOT: x19
 ; CHECK-O0: orr [[ID:w[0-9]+]], wzr, #0x1
+; CHECK-O0-NOT: x19
 ; CHECK-O0: strb [[ID]], [x0, #8]
-; CHECK-O0: mov x19, [[ID2]]
 ; CHECK-O0-NOT: x19
 entry:
   %call = call i8* @malloc(i64 16)
@@ -50,7 +51,7 @@ define float @caller(i8* %error_ref) {
 ; CHECK-O0: mov x19
 ; CHECK-O0: bl {{.*}}foo
 ; CHECK-O0: mov [[ID:x[0-9]+]], x19
-; CHECK-O0: cbnz [[ID]]
+; CHECK-O0: cbnz x19
 entry:
   %error_ptr_ref = alloca swifterror %swift_error*
   store %swift_error* null, %swift_error** %error_ptr_ref
@@ -89,7 +90,7 @@ define float @caller2(i8* %error_ref) {
 ; CHECK-O0: mov x19
 ; CHECK-O0: bl {{.*}}foo
 ; CHECK-O0: mov [[ID:x[0-9]+]], x19
-; CHECK-O0: cbnz [[ID]]
+; CHECK-O0: cbnz x19
 entry:
   %error_ptr_ref = alloca swifterror %swift_error*
   br label %bb_loop
@@ -128,7 +129,7 @@ define float @foo_if(%swift_error** swifterror %error_ptr_ref, i32 %cc) {
 ; CHECK-O0-LABEL: foo_if:
 ; spill x19
-; CHECK-O0: str x19
+; CHECK-O0: str x19, [sp, [[SLOT:#[0-9]+]]]
 ; CHECK-O0: cbz w0
 ; CHECK-O0: orr w{{.*}}, wzr, #0x10
 ; CHECK-O0: malloc
@@ -138,7 +139,8 @@ define float @foo_if(%swift_error** swifterror %error_ptr_ref, i32 %cc) {
 ; CHECK-O0: mov x19, [[ID]]
 ; CHECK-O0: ret
 ; reload from stack
-; CHECK-O0: ldr x19
+; CHECK-O0: ldr [[ID3:x[0-9]+]], [sp, [[SLOT]]]
+; CHECK-O0: mov x19, [[ID3]]
 ; CHECK-O0: ret
 entry:
   %cond = icmp ne i32 %cc, 0
@@ -172,18 +174,26 @@ define float @foo_loop(%swift_error** swifterror %error_ptr_ref, i32 %cc, float
 ; CHECK-O0-LABEL: foo_loop:
 ; spill x19
-; CHECK-O0: str x19
-; CHECk-O0: cbz
+; CHECK-O0: str x19, [sp, [[SLOT:#[0-9]+]]]
+; CHECK-O0: b [[BB1:[A-Za-z0-9_]*]]
+; CHECK-O0: [[BB1]]:
+; CHECK-O0: ldr x0, [sp, [[SLOT]]]
+; CHECK-O0: str x0, [sp, [[SLOT2:#[0-9]+]]]
+; CHECK-O0: cbz {{.*}}, [[BB2:[A-Za-z0-9_]*]]
 ; CHECK-O0: orr w{{.*}}, wzr, #0x10
 ; CHECK-O0: malloc
 ; CHECK-O0: mov [[ID:x[0-9]+]], x0
 ; CHECK-O0: strb w{{.*}}, [{{.*}}[[ID]], #8]
 ; spill x0
-; CHECK-O0: str x0
+; CHECK-O0: str x0, [sp, [[SLOT2]]]
+; CHECK-O0:[[BB2]]:
+; CHECK-O0: ldr x0, [sp, [[SLOT2]]]
 ; CHECK-O0: fcmp
-; CHECK-O0: b.le
+; CHECK-O0: str x0, [sp]
+; CHECK-O0: b.le [[BB1]]
 ; reload from stack
-; CHECK-O0: ldr x19
+; CHECK-O0: ldr [[ID3:x[0-9]+]], [sp]
+; CHECK-O0: mov x19, [[ID3]]
 ; CHECK-O0: ret
 entry:
   br label %bb_loop
@@ -272,7 +282,7 @@ define float @caller3(i8* %error_ref) {
 ; CHECK-O0: ldrb [[CODE:w[0-9]+]]
 ; CHECK-O0: ldr [[ID:x[0-9]+]]
 ; CHECK-O0: strb [[CODE]], [{{.*}}[[ID]]]
-; CHECK_O0: bl {{.*}}free
+; CHECK-O0: bl {{.*}}free
 entry:
   %s = alloca %struct.S, align 8
   %error_ptr_ref = alloca swifterror %swift_error*

test/CodeGen/ARM/swifterror.ll

@@ -43,20 +43,21 @@ define float @caller(i8* %error_ref) {
 ; CHECK-APPLE: ldrbeq [[CODE:r[0-9]+]], [r6, #8]
 ; CHECK-APPLE: strbeq [[CODE]], [{{.*}}[[ID]]]
 ; CHECK-APPLE: mov r0, r6
-; CHECK_APPLE: bl {{.*}}free
+; CHECK-APPLE: bl {{.*}}free
 
 ; CHECK-O0-LABEL: caller:
 ; spill r0
-; CHECK-O0-DAG: str r0,
 ; CHECK-O0-DAG: mov r6, #0
+; CHECK-O0-DAG: str r0, [sp, [[SLOT:#[0-9]+]]
 ; CHECK-O0: bl {{.*}}foo
-; CHECK-O0: mov r{{.*}}, r6
+; CHECK-O0: mov [[TMP:r[0-9]+]], r6
+; CHECK-O0: str [[TMP]], [sp]
 ; CHECK-O0: bne
 ; CHECK-O0: ldrb [[CODE:r[0-9]+]], [r0, #8]
-; reload r0
-; CHECK-O0: ldr [[ID:r[0-9]+]],
+; CHECK-O0: ldr [[ID:r[0-9]+]], [sp, [[SLOT]]]
 ; CHECK-O0: strb [[CODE]], [{{.*}}[[ID]]]
-; CHECK-O0: mov r0,
+; reload r0
+; CHECK-O0: ldr r0, [sp]
 ; CHECK-O0: free
 entry:
   %error_ptr_ref = alloca swifterror %swift_error*
@@ -88,7 +89,7 @@ define float @caller2(i8* %error_ref) {
 ; CHECK-APPLE: ldrb [[CODE:r[0-9]+]], [r6, #8]
 ; CHECK-APPLE: strb [[CODE]], [{{.*}}[[ID]]]
 ; CHECK-APPLE: mov r0, r6
-; CHECK_APPLE: bl {{.*}}free
+; CHECK-APPLE: bl {{.*}}free
 
 ; CHECK-O0-LABEL: caller2:
 ; spill r0
@@ -96,13 +97,14 @@ define float @caller2(i8* %error_ref) {
 ; CHECK-O0-DAG: mov r6, #0
 ; CHECK-O0: bl {{.*}}foo
 ; CHECK-O0: mov r{{.*}}, r6
+; CHECK-O0: str r0, [sp]
 ; CHECK-O0: bne
 ; CHECK-O0: ble
 ; CHECK-O0: ldrb [[CODE:r[0-9]+]], [r0, #8]
 ; reload r0
 ; CHECK-O0: ldr [[ID:r[0-9]+]],
 ; CHECK-O0: strb [[CODE]], [{{.*}}[[ID]]]
-; CHECK-O0: mov r0,
+; CHECK-O0: ldr r0, [sp]
 ; CHECK-O0: free
 entry:
   %error_ptr_ref = alloca swifterror %swift_error*
@@ -268,7 +270,7 @@ define float @caller3(i8* %error_ref) {
 ; CHECK-APPLE: ldrbeq [[CODE:r[0-9]+]], [r6, #8]
 ; CHECK-APPLE: strbeq [[CODE]], [{{.*}}[[ID]]]
 ; CHECK-APPLE: mov r0, r6
-; CHECK_APPLE: bl {{.*}}free
+; CHECK-APPLE: bl {{.*}}free
 
 ; CHECK-O0-LABEL: caller3:
 ; CHECK-O0-DAG: mov r6, #0
@@ -276,14 +278,15 @@ define float @caller3(i8* %error_ref) {
 ; CHECK-O0-DAG: mov r1
 ; CHECK-O0: bl {{.*}}foo_sret
 ; CHECK-O0: mov [[ID2:r[0-9]+]], r6
-; CHECK-O0: cmp [[ID2]]
+; CHECK-O0: cmp r6
+; CHECK-O0: str [[ID2]], [sp[[SLOT:.*]]]
 ; CHECK-O0: bne
 ; Access part of the error object and save it to error_ref
 ; CHECK-O0: ldrb [[CODE:r[0-9]+]]
 ; CHECK-O0: ldr [[ID:r[0-9]+]]
 ; CHECK-O0: strb [[CODE]], [{{.*}}[[ID]]]
-; CHECK-O0: mov r0,
-; CHECK_O0: bl {{.*}}free
+; CHECK-O0: ldr r0, [sp[[SLOT]]
+; CHECK-O0: bl {{.*}}free
 entry:
   %s = alloca %struct.S, align 8
   %error_ptr_ref = alloca swifterror %swift_error*
@@ -349,7 +352,7 @@ define float @caller4(i8* %error_ref) {
 ; CHECK-APPLE: ldrbeq [[CODE:r[0-9]+]], [r6, #8]
 ; CHECK-APPLE: strbeq [[CODE]], [{{.*}}[[ID]]]
 ; CHECK-APPLE: mov r0, r6
-; CHECK_APPLE: bl {{.*}}free
+; CHECK-APPLE: bl {{.*}}free
 entry:
   %error_ptr_ref = alloca swifterror %swift_error*
   store %swift_error* null, %swift_error** %error_ptr_ref

test/CodeGen/SystemZ/swifterror.ll

@@ -16,9 +16,8 @@ define float @foo(%swift_error** swifterror %error_ptr_ref) {
 ; CHECK-O0-LABEL: foo:
 ; CHECK-O0: lghi %r2, 16
 ; CHECK-O0: brasl %r14, malloc
-; CHECK-O0: lgr %r[[REG1:[0-9]+]], %r2
+; CHECK-O0: lgr %r9, %r2
 ; CHECK-O0: mvi 8(%r2), 1
-; CHECK-O0: lgr %r9, %r[[REG1]]
 entry:
   %call = call i8* @malloc(i64 16)
   %call.0 = bitcast i8* %call to %swift_error*
@@ -130,7 +129,8 @@ define float @foo_if(%swift_error** swifterror %error_ptr_ref, i32 %cc) {
 ; CHECK-O0: lgr %r9, %r[[REG1]]
 ; CHECK-O0: br %r14
 ; reload from stack
-; CHECK-O0: lg %r9, [[OFFS]](%r15)
+; CHECK-O0: lg %r[[REG2:[0-9]+]], [[OFFS]](%r15)
+; CHECK-O0: lgr %r9, %r[[REG2]]
 ; CHECK-O0: br %r14
 entry:
   %cond = icmp ne i32 %cc, 0
@@ -172,7 +172,8 @@ define float @foo_loop(%swift_error** swifterror %error_ptr_ref, i32 %cc, float
 ; CHECK-O0: mvi 8(%r2), 1
 ; CHECK-O0: jnh
 ; reload from stack
-; CHECK-O0: lg %r9, [[OFFS:[0-9]+]](%r15)
+; CHECK-O0: lg %r[[REG2:[0-9]+]], [[OFFS:[0-9]+]](%r15)
+; CHECK-O0: lgr %r9, %r[[REG2]]
 ; CHECK-O0: br %r14
 entry:
   br label %bb_loop

test/CodeGen/X86/swifterror.ll

@@ -39,7 +39,7 @@ define float @caller(i8* %error_ref) {
 ; Access part of the error object and save it to error_ref
 ; CHECK-APPLE: movb 8(%r12)
 ; CHECK-APPLE: movq %r12, %rdi
-; CHECK_APPLE: callq {{.*}}free
+; CHECK-APPLE: callq {{.*}}free
 
 ; CHECK-O0-LABEL: caller:
 ; CHECK-O0: xorl
@@ -76,14 +76,14 @@ define float @caller2(i8* %error_ref) {
 ; Access part of the error object and save it to error_ref
 ; CHECK-APPLE: movb 8(%r12)
 ; CHECK-APPLE: movq %r12, %rdi
-; CHECK_APPLE: callq {{.*}}free
+; CHECK-APPLE: callq {{.*}}free
 
 ; CHECK-O0-LABEL: caller2:
 ; CHECK-O0: xorl
 ; CHECK-O0: movl %{{.*}}, %r12d
 ; CHECK-O0: callq {{.*}}foo
 ; CHECK-O0: movq %r12, [[ID:%[a-z]+]]
-; CHECK-O0: cmpq $0, [[ID]]
+; CHECK-O0: cmpq $0, %r12
 ; CHECK-O0: jne
 entry:
   %error_ptr_ref = alloca swifterror %swift_error*
@@ -133,7 +133,8 @@ define float @foo_if(%swift_error** swifterror %error_ptr_ref, i32 %cc) {
 ; CHECK-O0-DAG: movq [[ID]], %r12
 ; CHECK-O0: ret
 ; reload from stack
-; CHECK-O0: movq {{.*}}(%rsp), %r12
+; CHECK-O0: movq {{.*}}(%rsp), [[REG:%[a-z]+]]
+; CHECK-O0: movq [[REG]], %r12
 ; CHECK-O0: ret
 entry:
   %cond = icmp ne i32 %cc, 0
@@ -173,11 +174,12 @@ define float @foo_loop(%swift_error** swifterror %error_ptr_ref, i32 %cc, float
 ; CHECK-O0: je
 ; CHECK-O0: movl $16,
 ; CHECK-O0: malloc
-; CHECK-O0: movq %rax, [[ID:%[a-z]+]]
+; CHECK-O0: movq %rax, [[ID:%[a-z0-9]+]]
 ; CHECK-O0: movb $1, 8([[ID]])
 ; CHECK-O0: jbe
 ; reload from stack
-; CHECK-O0: movq {{.*}}(%rsp), %r12
+; CHECK-O0: movq {{.*}}(%rsp), [[REG:%[a-z0-9]+]]
+; CHECK-O0: movq [[REG]], %r12
 ; CHECK-O0: ret
 entry:
   br label %bb_loop
@@ -251,7 +253,7 @@ define float @caller3(i8* %error_ref) {
 ; CHECK-APPLE: movb 8(%r12),
 ; CHECK-APPLE: movb %{{.*}},
 ; CHECK-APPLE: movq %r12, %rdi
-; CHECK_APPLE: callq {{.*}}free
+; CHECK-APPLE: callq {{.*}}free
 
 ; CHECK-O0-LABEL: caller3:
 ; CHECK-O0: xorl
@@ -300,7 +302,7 @@ define float @caller_with_multiple_swifterror_values(i8* %error_ref, i8* %error_
 ; Access part of the error object and save it to error_ref
 ; CHECK-APPLE: movb 8(%r12)
 ; CHECK-APPLE: movq %r12, %rdi
-; CHECK_APPLE: callq {{.*}}free
+; CHECK-APPLE: callq {{.*}}free
 
 ; The second swifterror value:
 ; CHECK-APPLE: xorl %r12d, %r12d
@@ -310,7 +312,7 @@
 ; Access part of the error object and save it to error_ref
 ; CHECK-APPLE: movb 8(%r12)
 ; CHECK-APPLE: movq %r12, %rdi
-; CHECK_APPLE: callq {{.*}}free
+; CHECK-APPLE: callq {{.*}}free
 
 ; CHECK-O0-LABEL: caller_with_multiple_swifterror_values:
@@ -411,12 +413,9 @@ define swiftcc float @forward_swifterror(%swift_error** swifterror %error_ptr_re
 ; CHECK-APPLE: retq
 
 ; CHECK-O0-LABEL: forward_swifterror:
-; CHECK-O0: subq $24, %rsp
-; CHECK-O0: movq %r12, %rcx
-; CHECK-O0: movq %rcx, 16(%rsp)
-; CHECK-O0: movq %rax, 8(%rsp)
+; CHECK-O0: pushq %rax
 ; CHECK-O0: callq _moo
-; CHECK-O0: addq $24, %rsp
+; CHECK-O0: popq %rax
 ; CHECK-O0: retq
 entry:
@@ -440,20 +439,21 @@ define swiftcc float @conditionally_forward_swifterror(%swift_error** swifterror
 ; CHECK-O0-LABEL: conditionally_forward_swifterror:
 ; CHECK-O0: subq $24, %rsp
-; CHECK-O0: movq %r12, %rcx
+; CHECK-O0: movq %r12, [[REG1:%[a-z0-9]+]]
 ; CHECK-O0: cmpl $0, %edi
-; CHECK-O0: movq %rax, 16(%rsp)
-; CHECK-O0: movq %r12, 8(%rsp)
-; CHECK-O0: movq %rcx, (%rsp)
+; CHECK-O0-DAG: movq [[REG1]], [[STK:[0-9]+]](%rsp)
+; CHECK-O0-DAG: movq %r12, [[STK2:[0-9]+]](%rsp)
 ; CHECK-O0: je
 
-; CHECK-O0: movq 8(%rsp), %r12
+; CHECK-O0: movq [[STK2]](%rsp), [[REG:%[a-z0-9]+]]
+; CHECK-O0: movq [[REG]], %r12
 ; CHECK-O0: callq _moo
 ; CHECK-O0: addq $24, %rsp
 ; CHECK-O0: retq
 
+; CHECK-O0: movq [[STK2]](%rsp), [[REG:%[a-z0-9]+]]
 ; CHECK-O0: xorps %xmm0, %xmm0
-; CHECK-O0: movq 8(%rsp), %r12
+; CHECK-O0: movq [[REG]], %r12
 ; CHECK-O0: addq $24, %rsp
 ; CHECK-O0: retq
 entry:
@@ -495,3 +495,27 @@ define swiftcc {i32, i32, i32} @empty_swiftcc({i32, i32, i32} , %swift_error** s
 entry:
   ret {i32, i32, i32} %0
 }
+
+; Make sure we can handle the case when isel generates new machine basic blocks.
+; CHECK-APPLE-LABEL: dont_crash_on_new_isel_blocks:
+; CHECK-APPLE: pushq %rax
+; CHECK-APPLE: xorl %eax, %eax
+; CHECK-APPLE: testb %al, %al
+; CHECK-APPLE: jne
+; CHECK-APPLE: callq *%rax
+; CHECK-APPLE: popq %rax
+; CHECK-APPLE: ret
+define swiftcc void @dont_crash_on_new_isel_blocks(%swift_error** nocapture swifterror, i1, i8**) {
+entry:
+  %3 = or i1 false, %1
+  br i1 %3, label %cont, label %falsebb
+
+falsebb:
+  %4 = load i8*, i8** %2, align 8
+  br label %cont
+
+cont:
+  tail call swiftcc void undef(%swift_error** nocapture swifterror %0)
+  ret void
+}