
[llvm][NFC] Style fixes in Inliner.cpp

Summary:
Function names: camel case, lower case first letter.
Variable names: start with an upper-case letter. Iterators that were
named 'i' are renamed with a descriptive name, as 'I' typically denotes
an 'Instruction &'.

Lambda captures simplification.

Opportunistic boolean return simplification.
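
As a side note, a minimal before/after sketch of the boolean-return
simplification (a hypothetical helper used purely for illustration, not
the actual Inliner.cpp code):

// Before: branch on the condition only to return true or false.
static bool exceedsThreshold(int Cost, int Threshold) {
  if (Cost > Threshold)
    return true;
  return false;
}

// After: return the boolean expression directly; behavior is unchanged.
static bool exceedsThreshold(int Cost, int Threshold) {
  return Cost > Threshold;
}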

Reviewers: davidxl, dblaikie

Subscribers: eraman, hiraditya, llvm-commits

Tags: #llvm

Differential Revision: https://reviews.llvm.org/D77837
Author: Mircea Trofin
Date:   2020-04-09 15:26:46 -07:00
Parent: 7f4648c85e
Commit: de85a96840


@@ -181,7 +181,7 @@ static void mergeInlinedArrayAllocas(
// Loop over all the allocas we have so far and see if they can be merged with
// a previously inlined alloca. If not, remember that we had it.
- for (unsigned AllocaNo = 0, e = IFI.StaticAllocas.size(); AllocaNo != e;
+ for (unsigned AllocaNo = 0, E = IFI.StaticAllocas.size(); AllocaNo != E;
++AllocaNo) {
AllocaInst *AI = IFI.StaticAllocas[AllocaNo];
@@ -272,7 +272,7 @@ static void mergeInlinedArrayAllocas(
/// available from other functions inlined into the caller. If we are able to
/// inline this call site we attempt to reuse already available allocas or add
/// any new allocas to the set if not possible.
- static InlineResult InlineCallIfPossible(
+ static InlineResult inlineCallIfPossible(
CallBase &CS, InlineFunctionInfo &IFI,
InlinedArrayAllocasTy &InlinedArrayAllocas, int InlineHistory,
bool InsertLifetime, function_ref<AAResults &(Function &)> &AARGetter,
@@ -338,7 +338,7 @@ shouldBeDeferred(Function *Caller, InlineCost IC, int &TotalSecondaryCost,
// can apply a huge negative bonus to TotalSecondaryCost.
bool ApplyLastCallBonus = Caller->hasLocalLinkage() && !Caller->hasOneUse();
// This bool tracks what happens if we DO inline C into B.
- bool inliningPreventsSomeOuterInline = false;
+ bool InliningPreventsSomeOuterInline = false;
for (User *U : Caller->users()) {
// If the caller will not be removed (either because it does not have a
// local linkage or because the LastCallToStaticBonus has been already
@@ -368,7 +368,7 @@ shouldBeDeferred(Function *Caller, InlineCost IC, int &TotalSecondaryCost,
// this callsite. We subtract off the penalty for the call instruction,
// which we would be deleting.
if (IC2.getCostDelta() <= CandidateCost) {
- inliningPreventsSomeOuterInline = true;
+ InliningPreventsSomeOuterInline = true;
TotalSecondaryCost += IC2.getCost();
}
}
@@ -379,10 +379,7 @@ shouldBeDeferred(Function *Caller, InlineCost IC, int &TotalSecondaryCost,
if (ApplyLastCallBonus)
TotalSecondaryCost -= InlineConstants::LastCallToStaticBonus;
- if (inliningPreventsSomeOuterInline && TotalSecondaryCost < IC.getCost())
- return true;
- return false;
+ return InliningPreventsSomeOuterInline && TotalSecondaryCost < IC.getCost();
}
static std::basic_ostream<char> &operator<<(std::basic_ostream<char> &R,
@@ -479,7 +476,7 @@ shouldInline(CallBase &CS, function_ref<InlineCost(CallBase &CS)> GetInlineCost,
/// Return true if the specified inline history ID
/// indicates an inline history that includes the specified function.
- static bool InlineHistoryIncludes(
+ static bool inlineHistoryIncludes(
Function *F, int InlineHistoryID,
const SmallVectorImpl<std::pair<Function *, int>> &InlineHistory) {
while (InlineHistoryID != -1) {
@@ -504,9 +501,9 @@ bool LegacyInlinerBase::runOnSCC(CallGraphSCC &SCC) {
return inlineCalls(SCC);
}
- static void emit_inlined_into(OptimizationRemarkEmitter &ORE, DebugLoc &DLoc,
- const BasicBlock *Block, const Function &Callee,
- const Function &Caller, const InlineCost &IC) {
+ static void emitInlinedInto(OptimizationRemarkEmitter &ORE, DebugLoc &DLoc,
+ const BasicBlock *Block, const Function &Callee,
+ const Function &Caller, const InlineCost &IC) {
ORE.emit([&]() {
bool AlwaysInline = IC.isAlways();
StringRef RemarkName = AlwaysInline ? "AlwaysInline" : "Inlined";
@@ -520,8 +517,8 @@ static void setInlineRemark(CallBase &CS, StringRef Message) {
if (!InlineRemarkAttribute)
return;
- Attribute attr = Attribute::get(CS.getContext(), "inline-remark", Message);
- CS.addAttribute(AttributeList::FunctionIndex, attr);
+ Attribute Attr = Attribute::get(CS.getContext(), "inline-remark", Message);
+ CS.addAttribute(AttributeList::FunctionIndex, Attr);
}
static bool
@@ -598,10 +595,10 @@ inlineCallsImpl(CallGraphSCC &SCC, CallGraph &CG,
// Now that we have all of the call sites, move the ones to functions in the
// current SCC to the end of the list.
unsigned FirstCallInSCC = CallSites.size();
- for (unsigned i = 0; i < FirstCallInSCC; ++i)
- if (Function *F = CallSites[i].first->getCalledFunction())
+ for (unsigned I = 0; I < FirstCallInSCC; ++I)
+ if (Function *F = CallSites[I].first->getCalledFunction())
if (SCCFunctions.count(F))
- std::swap(CallSites[i--], CallSites[--FirstCallInSCC]);
+ std::swap(CallSites[I--], CallSites[--FirstCallInSCC]);
InlinedArrayAllocasTy InlinedArrayAllocas;
InlineFunctionInfo InlineInfo(&CG, &GetAssumptionCache, PSI);
@@ -639,7 +636,7 @@ inlineCallsImpl(CallGraphSCC &SCC, CallGraph &CG,
// infinitely inline.
InlineHistoryID = CallSites[CSi].second;
if (InlineHistoryID != -1 &&
- InlineHistoryIncludes(Callee, InlineHistoryID, InlineHistory)) {
+ inlineHistoryIncludes(Callee, InlineHistoryID, InlineHistory)) {
setInlineRemark(CS, "recursive");
continue;
}
@@ -684,7 +681,7 @@ inlineCallsImpl(CallGraphSCC &SCC, CallGraph &CG,
// Attempt to inline the function.
using namespace ore;
- InlineResult IR = InlineCallIfPossible(
+ InlineResult IR = inlineCallIfPossible(
CS, InlineInfo, InlinedArrayAllocas, InlineHistoryID,
InsertLifetime, AARGetter, ImportedFunctionsStats);
if (!IR.isSuccess()) {
@@ -701,7 +698,7 @@ inlineCallsImpl(CallGraphSCC &SCC, CallGraph &CG,
}
++NumInlined;
- emit_inlined_into(ORE, DLoc, Block, *Callee, *Caller, *OIC);
+ emitInlinedInto(ORE, DLoc, Block, *Callee, *Caller, *OIC);
// If inlining this function gave us any new call sites, throw them
// onto our worklist to process. They are useful inline candidates.
@@ -784,8 +781,8 @@ bool LegacyInlinerBase::inlineCalls(CallGraphSCC &SCC) {
};
return inlineCallsImpl(
SCC, CG, GetAssumptionCache, PSI, GetTLI, InsertLifetime,
- [this](CallBase &CS) { return getInlineCost(CS); },
- LegacyAARGetter(*this), ImportedFunctionsStats);
+ [&](CallBase &CS) { return getInlineCost(CS); }, LegacyAARGetter(*this),
+ ImportedFunctionsStats);
}
/// Remove now-dead linkonce functions at the end of
@@ -986,17 +983,17 @@ PreservedAnalyses InlinerPass::run(LazyCallGraph::SCC &InitialC,
// Loop forward over all of the calls. Note that we cannot cache the size as
// inlining can introduce new calls that need to be processed.
- for (int i = 0; i < (int)Calls.size(); ++i) {
+ for (int I = 0; I < (int)Calls.size(); ++I) {
// We expect the calls to typically be batched with sequences of calls that
// have the same caller, so we first set up some shared infrastructure for
// this caller. We also do any pruning we can at this layer on the caller
// alone.
- Function &F = *Calls[i].first->getCaller();
+ Function &F = *Calls[I].first->getCaller();
LazyCallGraph::Node &N = *CG.lookup(F);
if (CG.lookupSCC(N) != C)
continue;
if (F.hasOptNone()) {
- setInlineRemark(*Calls[i].first, "optnone attribute");
+ setInlineRemark(*Calls[I].first, "optnone attribute");
continue;
}
@@ -1041,14 +1038,14 @@ PreservedAnalyses InlinerPass::run(LazyCallGraph::SCC &InitialC,
// We bail out as soon as the caller has to change so we can update the
// call graph and prepare the context of that new caller.
bool DidInline = false;
- for (; i < (int)Calls.size() && Calls[i].first->getCaller() == &F; ++i) {
+ for (; I < (int)Calls.size() && Calls[I].first->getCaller() == &F; ++I) {
int InlineHistoryID;
CallBase *CS = nullptr;
- std::tie(CS, InlineHistoryID) = Calls[i];
+ std::tie(CS, InlineHistoryID) = Calls[I];
Function &Callee = *CS->getCalledFunction();
if (InlineHistoryID != -1 &&
- InlineHistoryIncludes(&Callee, InlineHistoryID, InlineHistory)) {
+ inlineHistoryIncludes(&Callee, InlineHistoryID, InlineHistory)) {
setInlineRemark(*CS, "recursive");
continue;
}
@@ -1111,7 +1108,7 @@ PreservedAnalyses InlinerPass::run(LazyCallGraph::SCC &InitialC,
++NumInlined;
- emit_inlined_into(ORE, DLoc, Block, Callee, F, *OIC);
+ emitInlinedInto(ORE, DLoc, Block, Callee, F, *OIC);
// Add any new callsites to defined functions to the worklist.
if (!IFI.InlinedCallSites.empty()) {
@@ -1152,8 +1149,8 @@ PreservedAnalyses InlinerPass::run(LazyCallGraph::SCC &InitialC,
Callee.removeDeadConstantUsers();
if (Callee.use_empty() && !CG.isLibFunction(Callee)) {
Calls.erase(
- std::remove_if(Calls.begin() + i + 1, Calls.end(),
- [&Callee](const std::pair<CallBase *, int> &Call) {
+ std::remove_if(Calls.begin() + I + 1, Calls.end(),
+ [&](const std::pair<CallBase *, int> &Call) {
return Call.first->getCaller() == &Callee;
}),
Calls.end());
@@ -1171,7 +1168,7 @@ PreservedAnalyses InlinerPass::run(LazyCallGraph::SCC &InitialC,
// Back the call index up by one to put us in a good position to go around
// the outer loop.
- --i;
+ --I;
if (!DidInline)
continue;