Mirror of https://github.com/RPCS3/llvm-mirror.git
ObjCARC: Remove implicit ilist iterator conversions, NFC
llvm-svn: 250756
This commit is contained in:
parent 96c5c49122
commit ba820b0689
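The whole patch follows one idiom: conversions between ilist nodes (Instruction*, BasicBlock*) and their iterators are written out instead of relying on the implicit iterator constructors — X->getIterator() to go from a node pointer to an iterator, &*It to go from an iterator back to a node pointer, and range-based for loops where a raw pointer used to be harvested from a loop iterator. Below is a minimal, self-contained sketch of that idiom; the helper name countExplicitly is illustrative only and is not part of this commit.

#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/Instruction.h"

using namespace llvm;

// Illustrative helper (not from this commit): demonstrates the explicit
// node<->iterator conversions that the patch adopts throughout ObjCARC.
static unsigned countExplicitly(Function &F) {
  unsigned N = 0;
  for (BasicBlock &BB : F) {          // range-for instead of Function::iterator
    if (BB.empty())
      continue;
    // Node -> iterator: call getIterator() explicitly rather than relying on
    // the implicit Instruction* -> BasicBlock::iterator conversion.
    BasicBlock::iterator It = BB.front().getIterator();
    for (BasicBlock::iterator E = BB.end(); It != E; ++It) {
      // Iterator -> node pointer: dereference and take the address with &*.
      Instruction *I = &*It;
      if (!I->use_empty())
        ++N;                          // count instructions that have uses
    }
  }
  return N;
}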
@@ -226,7 +226,7 @@ llvm::objcarc::FindDependencies(DependenceKind Flavor,
                                 SmallPtrSetImpl<Instruction *> &DependingInsts,
                                 SmallPtrSetImpl<const BasicBlock *> &Visited,
                                 ProvenanceAnalysis &PA) {
-  BasicBlock::iterator StartPos = StartInst;
+  BasicBlock::iterator StartPos = StartInst->getIterator();

   SmallVector<std::pair<BasicBlock *, BasicBlock::iterator>, 4> Worklist;
   Worklist.push_back(std::make_pair(StartBB, StartPos));
@@ -252,7 +252,7 @@ llvm::objcarc::FindDependencies(DependenceKind Flavor,
         break;
       }

-      Instruction *Inst = --LocalStartPos;
+      Instruction *Inst = &*--LocalStartPos;
       if (Depends(Flavor, Inst, Arg, PA)) {
         DependingInsts.insert(Inst);
         break;
@@ -72,12 +72,9 @@ bool ObjCARCAPElim::MayAutorelease(ImmutableCallSite CS, unsigned Depth) {
   if (const Function *Callee = CS.getCalledFunction()) {
     if (Callee->isDeclaration() || Callee->mayBeOverridden())
       return true;
-    for (Function::const_iterator I = Callee->begin(), E = Callee->end();
-         I != E; ++I) {
-      const BasicBlock *BB = I;
-      for (BasicBlock::const_iterator J = BB->begin(), F = BB->end();
-           J != F; ++J)
-        if (ImmutableCallSite JCS = ImmutableCallSite(J))
+    for (const BasicBlock &BB : *Callee) {
+      for (const Instruction &I : BB)
+        if (ImmutableCallSite JCS = ImmutableCallSite(&I))
           // This recursion depth limit is arbitrary. It's just great
           // enough to cover known interesting testcases.
           if (Depth < 3 &&
@@ -96,7 +93,7 @@ bool ObjCARCAPElim::OptimizeBB(BasicBlock *BB) {

   Instruction *Push = nullptr;
   for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ) {
-    Instruction *Inst = I++;
+    Instruction *Inst = &*I++;
     switch (GetBasicARCInstKind(Inst)) {
     case ARCInstKind::AutoreleasepoolPush:
       Push = Inst;
@@ -169,7 +166,7 @@ bool ObjCARCAPElim::runOnModule(Module &M) {
     if (std::next(F->begin()) != F->end())
       continue;
     // Ok, a single-block constructor function definition. Try to optimize it.
-    Changed |= OptimizeBB(F->begin());
+    Changed |= OptimizeBB(&F->front());
   }

   return Changed;
@@ -119,9 +119,9 @@ bool ObjCARCContract::optimizeRetainCall(Function &F, Instruction *Retain) {
     return false;

   // Check that the call is next to the retain.
-  BasicBlock::const_iterator I = Call;
-  ++I;
-  while (IsNoopInstruction(I)) ++I;
+  BasicBlock::const_iterator I = ++Call->getIterator();
+  while (IsNoopInstruction(&*I))
+    ++I;
   if (&*I != Retain)
     return false;

@@ -282,9 +282,9 @@ findRetainForStoreStrongContraction(Value *New, StoreInst *Store,
                                     Instruction *Release,
                                     ProvenanceAnalysis &PA) {
   // Walk up from the Store to find the retain.
-  BasicBlock::iterator I = Store;
+  BasicBlock::iterator I = Store->getIterator();
   BasicBlock::iterator Begin = Store->getParent()->begin();
-  while (I != Begin && GetBasicARCInstKind(I) != ARCInstKind::Retain) {
+  while (I != Begin && GetBasicARCInstKind(&*I) != ARCInstKind::Retain) {
     Instruction *Inst = &*I;

     // It is only safe to move the retain to the store if we can prove
@@ -294,7 +294,7 @@ findRetainForStoreStrongContraction(Value *New, StoreInst *Store,
       return nullptr;
     --I;
   }
-  Instruction *Retain = I;
+  Instruction *Retain = &*I;
   if (GetBasicARCInstKind(Retain) != ARCInstKind::Retain)
     return nullptr;
   if (GetArgRCIdentityRoot(Retain) != New)
@@ -429,7 +429,7 @@ bool ObjCARCContract::tryToPeepholeInstruction(
     // insert it now.
     if (!RetainRVMarker)
       return false;
-    BasicBlock::iterator BBI = Inst;
+    BasicBlock::iterator BBI = Inst->getIterator();
     BasicBlock *InstParent = Inst->getParent();

     // Step up to see if the call immediately precedes the RetainRV call.
@@ -440,11 +440,11 @@ bool ObjCARCContract::tryToPeepholeInstruction(
         BasicBlock *Pred = InstParent->getSinglePredecessor();
         if (!Pred)
           goto decline_rv_optimization;
-        BBI = Pred->getTerminator();
+        BBI = Pred->getTerminator()->getIterator();
         break;
       }
       --BBI;
-    } while (IsNoopInstruction(BBI));
+    } while (IsNoopInstruction(&*BBI));

     if (&*BBI == GetArgRCIdentityRoot(Inst)) {
       DEBUG(dbgs() << "Adding inline asm marker for "
@@ -581,16 +581,18 @@ ObjCARCOpt::OptimizeRetainRVCall(Function &F, Instruction *RetainRV) {
   ImmutableCallSite CS(Arg);
   if (const Instruction *Call = CS.getInstruction()) {
     if (Call->getParent() == RetainRV->getParent()) {
-      BasicBlock::const_iterator I = Call;
+      BasicBlock::const_iterator I(Call);
       ++I;
-      while (IsNoopInstruction(I)) ++I;
+      while (IsNoopInstruction(&*I))
+        ++I;
       if (&*I == RetainRV)
         return false;
     } else if (const InvokeInst *II = dyn_cast<InvokeInst>(Call)) {
       BasicBlock *RetainRVParent = RetainRV->getParent();
       if (II->getNormalDest() == RetainRVParent) {
         BasicBlock::const_iterator I = RetainRVParent->begin();
-        while (IsNoopInstruction(I)) ++I;
+        while (IsNoopInstruction(&*I))
+          ++I;
         if (&*I == RetainRV)
           return false;
       }
@@ -599,18 +601,21 @@ ObjCARCOpt::OptimizeRetainRVCall(Function &F, Instruction *RetainRV) {

   // Check for being preceded by an objc_autoreleaseReturnValue on the same
   // pointer. In this case, we can delete the pair.
-  BasicBlock::iterator I = RetainRV, Begin = RetainRV->getParent()->begin();
+  BasicBlock::iterator I = RetainRV->getIterator(),
+                       Begin = RetainRV->getParent()->begin();
   if (I != Begin) {
-    do --I; while (I != Begin && IsNoopInstruction(I));
-    if (GetBasicARCInstKind(I) == ARCInstKind::AutoreleaseRV &&
-        GetArgRCIdentityRoot(I) == Arg) {
+    do
+      --I;
+    while (I != Begin && IsNoopInstruction(&*I));
+    if (GetBasicARCInstKind(&*I) == ARCInstKind::AutoreleaseRV &&
+        GetArgRCIdentityRoot(&*I) == Arg) {
       Changed = true;
       ++NumPeeps;

       DEBUG(dbgs() << "Erasing autoreleaseRV,retainRV pair: " << *I << "\n"
                    << "Erasing " << *RetainRV << "\n");

-      EraseInstruction(I);
+      EraseInstruction(&*I);
       EraseInstruction(RetainRV);
       return true;
     }
@@ -1216,7 +1221,7 @@ bool ObjCARCOpt::VisitBottomUp(BasicBlock *BB,

   // Visit all the instructions, bottom-up.
   for (BasicBlock::iterator I = BB->end(), E = BB->begin(); I != E; --I) {
-    Instruction *Inst = std::prev(I);
+    Instruction *Inst = &*std::prev(I);

     // Invoke instructions are visited as part of their successors (below).
     if (isa<InvokeInst>(Inst))
@@ -1342,12 +1347,10 @@ ObjCARCOpt::VisitTopDown(BasicBlock *BB,
                      << "Performing Dataflow:\n");

   // Visit all the instructions, top-down.
-  for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
-    Instruction *Inst = I;
+  for (Instruction &Inst : *BB) {
+    DEBUG(dbgs() << "    Visiting " << Inst << "\n");

-    DEBUG(dbgs() << "    Visiting " << *Inst << "\n");
-
-    NestingDetected |= VisitInstructionTopDown(Inst, Releases, MyStates);
+    NestingDetected |= VisitInstructionTopDown(&Inst, Releases, MyStates);
   }

   DEBUG(llvm::dbgs() << "\nState Before Checking for CFG Hazards:\n"
@@ -1413,16 +1416,15 @@ ComputePostOrders(Function &F,
   // Functions may have many exits, and there also blocks which we treat
   // as exits due to ignored edges.
   SmallVector<std::pair<BasicBlock *, BBState::edge_iterator>, 16> PredStack;
-  for (Function::iterator I = F.begin(), E = F.end(); I != E; ++I) {
-    BasicBlock *ExitBB = I;
-    BBState &MyStates = BBStates[ExitBB];
+  for (BasicBlock &ExitBB : F) {
+    BBState &MyStates = BBStates[&ExitBB];
     if (!MyStates.isExit())
       continue;

     MyStates.SetAsExit();

-    PredStack.push_back(std::make_pair(ExitBB, MyStates.pred_begin()));
-    Visited.insert(ExitBB);
+    PredStack.push_back(std::make_pair(&ExitBB, MyStates.pred_begin()));
+    Visited.insert(&ExitBB);
     while (!PredStack.empty()) {
     reverse_dfs_next_succ:
       BBState::edge_iterator PE = BBStates[PredStack.back().first].pred_end();
@@ -1830,7 +1832,7 @@ void ObjCARCOpt::OptimizeWeakCalls(Function &F) {
       // analysis too, but that would want caching. A better approach would be to
       // use the technique that EarlyCSE uses.
       inst_iterator Current = std::prev(I);
-      BasicBlock *CurrentBB = Current.getBasicBlockIterator();
+      BasicBlock *CurrentBB = &*Current.getBasicBlockIterator();
       for (BasicBlock::iterator B = CurrentBB->begin(),
                                 J = Current.getInstructionIterator();
            J != B; --J) {
@@ -2081,9 +2083,8 @@ void ObjCARCOpt::OptimizeReturns(Function &F) {

   SmallPtrSet<Instruction *, 4> DependingInstructions;
   SmallPtrSet<const BasicBlock *, 4> Visited;
-  for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE; ++FI) {
-    BasicBlock *BB = FI;
-    ReturnInst *Ret = dyn_cast<ReturnInst>(&BB->back());
+  for (BasicBlock &BB: F) {
+    ReturnInst *Ret = dyn_cast<ReturnInst>(&BB.back());

     DEBUG(dbgs() << "Visiting: " << *Ret << "\n");

@@ -2095,19 +2096,16 @@ void ObjCARCOpt::OptimizeReturns(Function &F) {
     // Look for an ``autorelease'' instruction that is a predecessor of Ret and
     // dependent on Arg such that there are no instructions dependent on Arg
     // that need a positive ref count in between the autorelease and Ret.
-    CallInst *Autorelease =
-      FindPredecessorAutoreleaseWithSafePath(Arg, BB, Ret,
-                                             DependingInstructions, Visited,
-                                             PA);
+    CallInst *Autorelease = FindPredecessorAutoreleaseWithSafePath(
+        Arg, &BB, Ret, DependingInstructions, Visited, PA);
     DependingInstructions.clear();
     Visited.clear();

     if (!Autorelease)
       continue;

-    CallInst *Retain =
-      FindPredecessorRetainWithSafePath(Arg, BB, Autorelease,
-                                        DependingInstructions, Visited, PA);
+    CallInst *Retain = FindPredecessorRetainWithSafePath(
+        Arg, &BB, Autorelease, DependingInstructions, Visited, PA);
     DependingInstructions.clear();
     Visited.clear();

@@ -256,9 +256,9 @@ void BottomUpPtrState::HandlePotentialUse(BasicBlock *BB, Instruction *Inst,
       // one of its successor blocks, since we can't insert code after it
       // in its own block, and we don't want to split critical edges.
       if (isa<InvokeInst>(Inst))
-        InsertReverseInsertPt(BB->getFirstInsertionPt());
+        InsertReverseInsertPt(&*BB->getFirstInsertionPt());
       else
-        InsertReverseInsertPt(std::next(BasicBlock::iterator(Inst)));
+        InsertReverseInsertPt(&*++Inst->getIterator());
       SetSeq(S_Use);
     } else if (Seq == S_Release && IsUser(Class)) {
       DEBUG(dbgs() << "            PreciseReleaseUse: Seq: " << GetSeq() << "; "
@@ -268,9 +268,9 @@ void BottomUpPtrState::HandlePotentialUse(BasicBlock *BB, Instruction *Inst,
       assert(!HasReverseInsertPts());
       // As above; handle invoke specially.
       if (isa<InvokeInst>(Inst))
-        InsertReverseInsertPt(BB->getFirstInsertionPt());
+        InsertReverseInsertPt(&*BB->getFirstInsertionPt());
       else
-        InsertReverseInsertPt(std::next(BasicBlock::iterator(Inst)));
+        InsertReverseInsertPt(&*++Inst->getIterator());
     }
     break;
   case S_Stop: